From a6be1948ea14e43c0c2049392ed52055a5656c3e Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Fri, 29 Sep 2023 17:40:12 +0200 Subject: [PATCH 001/147] Deprecate EXPERIMENTAL_ env vars for code hostpots; introduce non-experimental ones (#3659) * Deprecate EXPERIMENTAL_ env vars for code hostpots; introduce non-experimental variants * make sure code hotspots being enabled is consistent with endpoint profiling being enabled --- packages/dd-trace/src/profiling/config.js | 41 ++++- .../dd-trace/src/profiling/profilers/wall.js | 4 + .../dd-trace/test/profiling/config.spec.js | 141 ++++++++++++++---- 3 files changed, 149 insertions(+), 37 deletions(-) diff --git a/packages/dd-trace/src/profiling/config.js b/packages/dd-trace/src/profiling/config.js index 85a6d3cdb2b..2c4856f5c59 100644 --- a/packages/dd-trace/src/profiling/config.js +++ b/packages/dd-trace/src/profiling/config.js @@ -37,6 +37,8 @@ class Config { DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE, DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT, DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES, + DD_PROFILING_CODEHOTSPOTS_ENABLED, + DD_PROFILING_ENDPOINT_COLLECTION_ENABLED, DD_PROFILING_EXPERIMENTAL_CODEHOTSPOTS_ENABLED, DD_PROFILING_EXPERIMENTAL_ENDPOINT_COLLECTION_ENABLED } = process.env @@ -53,8 +55,6 @@ class Config { Number(DD_PROFILING_UPLOAD_TIMEOUT), 60 * 1000) const sourceMap = coalesce(options.sourceMap, DD_PROFILING_SOURCE_MAP, true) - const endpointCollectionEnabled = coalesce(options.endpointCollection, - DD_PROFILING_EXPERIMENTAL_ENDPOINT_COLLECTION_ENABLED, false) const pprofPrefix = coalesce(options.pprofPrefix, DD_PROFILING_PPROF_PREFIX, '') @@ -71,11 +71,25 @@ class Config { tagger.parse({ env, host, service, version, functionname }) ) this.logger = ensureLogger(options.logger) + const logger = this.logger + function logExperimentalVarDeprecation (shortVarName) { + const deprecatedEnvVarName = `DD_PROFILING_EXPERIMENTAL_${shortVarName}` + const v = process.env[deprecatedEnvVarName] + // not null, undefined, or NaN -- same logic as koalas.hasValue + // eslint-disable-next-line no-self-compare + if (v != null && v === v) { + logger.warn(`${deprecatedEnvVarName} is deprecated. Use DD_PROFILING_${shortVarName} instead.`) + } + } this.flushInterval = flushInterval this.uploadTimeout = uploadTimeout this.sourceMap = sourceMap this.debugSourceMaps = isTrue(coalesce(options.debugSourceMaps, DD_PROFILING_DEBUG_SOURCE_MAPS, false)) - this.endpointCollectionEnabled = endpointCollectionEnabled + this.endpointCollectionEnabled = isTrue(coalesce(options.endpointCollection, + DD_PROFILING_ENDPOINT_COLLECTION_ENABLED, + DD_PROFILING_EXPERIMENTAL_ENDPOINT_COLLECTION_ENABLED, false)) + logExperimentalVarDeprecation('ENDPOINT_COLLECTION_ENABLED') + this.pprofPrefix = pprofPrefix this.v8ProfilerBugWorkaroundEnabled = isTrue(coalesce(options.v8ProfilerBugWorkaround, DD_PROFILING_V8_PROFILER_BUG_WORKAROUND, true)) @@ -113,8 +127,25 @@ class Config { const profilers = options.profilers ? 
options.profilers : getProfilers({ DD_PROFILING_HEAP_ENABLED, DD_PROFILING_WALLTIME_ENABLED, DD_PROFILING_PROFILERS }) - this.codeHotspotsEnabled = isTrue(coalesce(options.codeHotspotsEnabled, - DD_PROFILING_EXPERIMENTAL_CODEHOTSPOTS_ENABLED, false)) + + function getCodeHotspotsOptionsOr (defvalue) { + return coalesce(options.codeHotspotsEnabled, + DD_PROFILING_CODEHOTSPOTS_ENABLED, + DD_PROFILING_EXPERIMENTAL_CODEHOTSPOTS_ENABLED, defvalue) + } + this.codeHotspotsEnabled = isTrue(getCodeHotspotsOptionsOr(false)) + logExperimentalVarDeprecation('CODEHOTSPOTS_ENABLED') + if (this.endpointCollectionEnabled && !this.codeHotspotsEnabled) { + if (getCodeHotspotsOptionsOr(undefined) !== undefined) { + this.logger.warn( + 'Endpoint collection is enabled, but Code Hotspots are disabled. ' + + 'Enable Code Hotspots too for endpoint collection to work.') + this.endpointCollectionEnabled = false + } else { + this.logger.info('Code Hotspots are implicitly enabled by endpoint collection.') + this.codeHotspotsEnabled = true + } + } this.profilers = ensureProfilers(profilers, this) } diff --git a/packages/dd-trace/src/profiling/profilers/wall.js b/packages/dd-trace/src/profiling/profilers/wall.js index f8f1b4226dc..57ca7e1b242 100644 --- a/packages/dd-trace/src/profiling/profilers/wall.js +++ b/packages/dd-trace/src/profiling/profilers/wall.js @@ -102,6 +102,10 @@ class NativeWallProfiler { return this._codeHotspotsEnabled } + endpointCollectionEnabled () { + return this._endpointCollectionEnabled + } + start ({ mapper } = {}) { if (this._started) return diff --git a/packages/dd-trace/test/profiling/config.spec.js b/packages/dd-trace/test/profiling/config.spec.js index 835f0e30a9b..9becc3a0429 100644 --- a/packages/dd-trace/test/profiling/config.spec.js +++ b/packages/dd-trace/test/profiling/config.spec.js @@ -14,6 +14,12 @@ const { ConsoleLogger } = require('../../src/profiling/loggers/console') describe('config', () => { let Config let env + const nullLogger = { + debug () { }, + info () { }, + warn () { }, + error () { } + } beforeEach(() => { Config = require('../../src/profiling/config').Config @@ -52,12 +58,7 @@ describe('config', () => { enabled: false, service: 'test', version: '1.2.3-test.0', - logger: { - debug () { }, - info () { }, - warn () { }, - error () { } - }, + logger: nullLogger, exporters: 'agent,file', profilers: 'space,wall', url: 'http://localhost:1234/', @@ -116,12 +117,7 @@ describe('config', () => { DD_PROFILING_PROFILERS: '' } const options = { - logger: { - debug () {}, - info () {}, - warn () {}, - error () {} - } + logger: nullLogger } const config = new Config(options) @@ -133,16 +129,11 @@ describe('config', () => { it('should support profiler config with DD_PROFILING_PROFILERS', () => { process.env = { DD_PROFILING_PROFILERS: 'wall', - DD_PROFILING_EXPERIMENTAL_CODEHOTSPOTS_ENABLED: '1', + DD_PROFILING_CODEHOTSPOTS_ENABLED: '1', DD_PROFILING_V8_PROFILER_BUG_WORKAROUND: '0' } const options = { - logger: { - debug () {}, - info () {}, - warn () {}, - error () {} - } + logger: nullLogger } const config = new Config(options) @@ -161,12 +152,7 @@ describe('config', () => { DD_PROFILING_HEAP_ENABLED: '1' } const options = { - logger: { - debug () {}, - info () {}, - warn () {}, - error () {} - } + logger: nullLogger } const config = new Config(options) @@ -181,10 +167,88 @@ describe('config', () => { DD_PROFILING_PROFILERS: 'wall,wall', DD_PROFILING_WALLTIME_ENABLED: '1' } + const options = { + logger: nullLogger + } + + const config = new Config(options) + + 
expect(config.profilers).to.be.an('array') + expect(config.profilers.length).to.equal(1) + expect(config.profilers[0]).to.be.an.instanceOf(WallProfiler) + }) + + it('should prioritize options over env variables', () => { + process.env = { + DD_PROFILING_PROFILERS: 'space', + DD_PROFILING_CODEHOTSPOTS_ENABLED: '1', + DD_PROFILING_ENDPOINT_COLLECTION_ENABLED: '1' + } + const options = { + logger: nullLogger, + profilers: ['wall'], + codeHotspotsEnabled: false, + endpointCollection: false + } + + const config = new Config(options) + + expect(config.profilers).to.be.an('array') + expect(config.profilers.length).to.equal(1) + expect(config.profilers[0]).to.be.an.instanceOf(WallProfiler) + expect(config.profilers[0].codeHotspotsEnabled()).false + expect(config.profilers[0].endpointCollectionEnabled()).false + }) + + it('should prioritize non-experimental env variables and warn about experimental ones', () => { + process.env = { + DD_PROFILING_PROFILERS: 'wall', + DD_PROFILING_CODEHOTSPOTS_ENABLED: '0', + DD_PROFILING_EXPERIMENTAL_CODEHOTSPOTS_ENABLED: '1', + DD_PROFILING_ENDPOINT_COLLECTION_ENABLED: '0', + DD_PROFILING_EXPERIMENTAL_ENDPOINT_COLLECTION_ENABLED: '1' + } + const warnings = [] const options = { logger: { debug () {}, info () {}, + warn (warning) { + warnings.push(warning) + }, + error () {} + } + } + + const config = new Config(options) + + expect(warnings.length).to.equal(2) + expect(warnings[0]).to.equal( + 'DD_PROFILING_EXPERIMENTAL_ENDPOINT_COLLECTION_ENABLED is deprecated. ' + + 'Use DD_PROFILING_ENDPOINT_COLLECTION_ENABLED instead.') + expect(warnings[1]).to.equal( + 'DD_PROFILING_EXPERIMENTAL_CODEHOTSPOTS_ENABLED is deprecated. ' + + 'Use DD_PROFILING_CODEHOTSPOTS_ENABLED instead.') + + expect(config.profilers).to.be.an('array') + expect(config.profilers.length).to.equal(1) + expect(config.profilers[0]).to.be.an.instanceOf(WallProfiler) + expect(config.profilers[0].codeHotspotsEnabled()).false + expect(config.profilers[0].endpointCollectionEnabled()).false + }) + + it('should implicitly turn on code hotspots for endpoint profiling when they are not explicitly disabled', () => { + process.env = { + DD_PROFILING_PROFILERS: 'wall', + DD_PROFILING_ENDPOINT_COLLECTION_ENABLED: '1' + } + const infos = [] + const options = { + logger: { + debug () {}, + info (info) { + infos.push(info) + }, warn () {}, error () {} } @@ -192,33 +256,46 @@ describe('config', () => { const config = new Config(options) + expect(infos.length).to.equal(1) + expect(infos[0]).to.equal('Code Hotspots are implicitly enabled by endpoint collection.') + expect(config.profilers).to.be.an('array') expect(config.profilers.length).to.equal(1) expect(config.profilers[0]).to.be.an.instanceOf(WallProfiler) + expect(config.profilers[0].codeHotspotsEnabled()).true + expect(config.profilers[0].endpointCollectionEnabled()).true }) - it('should prioritize options over env variables', () => { + it('should warn about code hotspots being explicitly disabled with endpoint profiling', () => { process.env = { - DD_PROFILING_PROFILERS: 'space', - DD_PROFILING_EXPERIMENTAL_CODEHOTSPOTS_ENABLED: '1' + DD_PROFILING_PROFILERS: 'wall', + DD_PROFILING_CODEHOTSPOTS_ENABLED: '0', + DD_PROFILING_ENDPOINT_COLLECTION_ENABLED: '1' } + const warnings = [] const options = { logger: { debug () {}, info () {}, - warn () {}, + warn (warning) { + warnings.push(warning) + }, error () {} - }, - profilers: ['wall'], - codeHotspotsEnabled: false + } } const config = new Config(options) + expect(warnings.length).to.equal(1) + 
expect(warnings[0]).to.equal( + 'Endpoint collection is enabled, but Code Hotspots are disabled. ' + + 'Enable Code Hotspots too for endpoint collection to work.') + expect(config.profilers).to.be.an('array') expect(config.profilers.length).to.equal(1) expect(config.profilers[0]).to.be.an.instanceOf(WallProfiler) expect(config.profilers[0].codeHotspotsEnabled()).false + expect(config.profilers[0].endpointCollectionEnabled()).false }) it('should support tags', () => { From cbb30e87f0035d7a98260205731f6e6e472c1b8a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Mon, 2 Oct 2023 07:27:34 +0200 Subject: [PATCH 002/147] [ci-visibility] Improve git unshallow command (#3664) --- packages/dd-trace/src/plugins/util/git.js | 30 ++++++- .../dd-trace/test/plugins/util/git.spec.js | 86 ++++++++++++++++++- 2 files changed, 110 insertions(+), 6 deletions(-) diff --git a/packages/dd-trace/src/plugins/util/git.js b/packages/dd-trace/src/plugins/util/git.js index 4b6048e33af..eb086f2205b 100644 --- a/packages/dd-trace/src/plugins/util/git.js +++ b/packages/dd-trace/src/plugins/util/git.js @@ -61,15 +61,37 @@ function unshallowRepository () { } const defaultRemoteName = sanitizedExec('git', ['config', '--default', 'origin', '--get', 'clone.defaultRemoteName']) const revParseHead = sanitizedExec('git', ['rev-parse', 'HEAD']) - sanitizedExec('git', [ + + const baseGitOptions = [ 'fetch', '--shallow-since="1 month ago"', '--update-shallow', '--filter=blob:none', '--recurse-submodules=no', - defaultRemoteName, - revParseHead - ]) + defaultRemoteName + ] + + try { + execFileSync('git', [ + ...baseGitOptions, + revParseHead + ]) + } catch (e) { + // If the local HEAD is a commit that has not been pushed to the remote, the above command will fail. + log.error(e) + const upstreamRemote = sanitizedExec('git', ['rev-parse', '--abbrev-ref', '--symbolic-full-name', '@{upstream}']) + try { + execFileSync('git', [ + ...baseGitOptions, + upstreamRemote + ]) + } catch (e) { + // If the CI is working on a detached HEAD or branch tracking hasn’t been set up, the above command will fail. + log.error(e) + // We use sanitizedExec here because if this last option fails, we'll give up. 
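      // At this point two targeted fetches have already been attempted: one for the
      // local HEAD commit and one for the upstream tracking ref. This last attempt
      // fetches from the configured remote with no explicit commit or ref, and
      // sanitizedExec is used precisely so that a failure here is only logged and
      // the unshallow attempt simply ends.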
+ sanitizedExec('git', baseGitOptions) + } + } } function getRepositoryUrl () { diff --git a/packages/dd-trace/test/plugins/util/git.spec.js b/packages/dd-trace/test/plugins/util/git.spec.js index 81411930b37..83c4905968a 100644 --- a/packages/dd-trace/test/plugins/util/git.spec.js +++ b/packages/dd-trace/test/plugins/util/git.spec.js @@ -10,6 +10,7 @@ const path = require('path') const { GIT_REV_LIST_MAX_BUFFER } = require('../../../src/plugins/util/git') const proxyquire = require('proxyquire') const sanitizedExecStub = sinon.stub().returns('') +const execFileSyncStub = sinon.stub().returns('') const { GIT_COMMIT_SHA, @@ -26,10 +27,13 @@ const { CI_WORKSPACE_PATH } = require('../../../src/plugins/util/tags') -const { getGitMetadata } = proxyquire('../../../src/plugins/util/git', +const { getGitMetadata, unshallowRepository } = proxyquire('../../../src/plugins/util/git', { './exec': { - 'sanitizedExec': sanitizedExecStub + sanitizedExec: sanitizedExecStub + }, + 'child_process': { + execFileSync: execFileSyncStub } } ) @@ -239,3 +243,81 @@ describe('generatePackFilesForCommits', () => { expect(packFilesToUpload).to.eql([]) }) }) + +describe('unshallowRepository', () => { + afterEach(() => { + sanitizedExecStub.reset() + execFileSyncStub.reset() + }) + it('works for the usual case', () => { + sanitizedExecStub + .onCall(0).returns( + 'git version 2.39.0' + ) + .onCall(1).returns('origin') + .onCall(2).returns('daede5785233abb1a3cb76b9453d4eb5b98290b3') + + const options = [ + 'fetch', + '--shallow-since="1 month ago"', + '--update-shallow', + '--filter=blob:none', + '--recurse-submodules=no', + 'origin', + 'daede5785233abb1a3cb76b9453d4eb5b98290b3' + ] + + unshallowRepository() + expect(execFileSyncStub).to.have.been.calledWith('git', options) + }) + it('works if the local HEAD is a commit that has not been pushed to the remote', () => { + sanitizedExecStub + .onCall(0).returns( + 'git version 2.39.0' + ) + .onCall(1).returns('origin') + .onCall(2).returns('daede5785233abb1a3cb76b9453d4eb5b98290b3') + .onCall(3).returns('origin/master') + + execFileSyncStub + .onCall(0).throws() + + const options = [ + 'fetch', + '--shallow-since="1 month ago"', + '--update-shallow', + '--filter=blob:none', + '--recurse-submodules=no', + 'origin', + 'origin/master' + ] + + unshallowRepository() + expect(execFileSyncStub).to.have.been.calledWith('git', options) + }) + it('works if the CI is working on a detached HEAD or branch tracking hasn’t been set up', () => { + sanitizedExecStub + .onCall(0).returns( + 'git version 2.39.0' + ) + .onCall(1).returns('origin') + .onCall(2).returns('daede5785233abb1a3cb76b9453d4eb5b98290b3') + .onCall(3).returns('origin/master') + + execFileSyncStub + .onCall(0).throws() + .onCall(1).throws() + + const options = [ + 'fetch', + '--shallow-since="1 month ago"', + '--update-shallow', + '--filter=blob:none', + '--recurse-submodules=no', + 'origin' + ] + + unshallowRepository() + expect(sanitizedExecStub).to.have.been.calledWith('git', options) + }) +}) From fbe8eb9457dff8e2bfe42dc3b45ffc49533a0d42 Mon Sep 17 00:00:00 2001 From: Ugaitz Urien Date: Mon, 2 Oct 2023 09:03:52 +0200 Subject: [PATCH 003/147] Support threat detection in nextjs [APPSEC-10719] (#3641) * Instrumentate nextjs and call to waf * Feature implementation, pending to fix the tests * Add appsec nextjs pipeline * execute less tests * Execute tests again * try other approach to instrumentate next app request * Fix lints * Extract body and query in diagnostics channel publish not in subscribe * Fix tests * Fix 
query monitoring nextjs * small fixes * Remove unnecessary files * Restore next test * fixes * Fix from PR comments * Small changes from PR comments * Replace some exec by fs functions --- .github/workflows/appsec.yml | 25 ++ .gitignore | 3 + .../src/body-parser.js | 3 +- .../datadog-instrumentations/src/express.js | 3 +- packages/datadog-instrumentations/src/next.js | 40 +++ packages/dd-trace/src/appsec/channels.js | 4 +- packages/dd-trace/src/appsec/index.js | 41 ++- .../test/appsec/index.next.plugin.spec.js | 294 ++++++++++++++++++ packages/dd-trace/test/appsec/index.spec.js | 28 +- .../next/app-dir/app/api/test-text/route.js | 9 + .../appsec/next/app-dir/app/api/test/route.js | 16 + .../test/appsec/next/app-dir/app/layout.js | 12 + .../test/appsec/next/app-dir/app/page.js | 5 + .../appsec/next/app-dir/appsec-rules.json | 35 +++ .../test/appsec/next/app-dir/datadog.js | 7 + .../test/appsec/next/app-dir/jsconfig.json | 7 + .../test/appsec/next/app-dir/next.config.js | 12 + .../appsec/next/pages-dir/appsec-rules.json | 35 +++ .../test/appsec/next/pages-dir/datadog.js | 7 + .../test/appsec/next/pages-dir/naming.js | 19 ++ .../test/appsec/next/pages-dir/next.config.js | 6 + .../next/pages-dir/pages/api/test/index.js | 10 + .../appsec/next/pages-dir/public/test.txt | 1 + .../test/appsec/next/pages-dir/server.js | 24 ++ 24 files changed, 621 insertions(+), 25 deletions(-) create mode 100644 packages/dd-trace/test/appsec/index.next.plugin.spec.js create mode 100644 packages/dd-trace/test/appsec/next/app-dir/app/api/test-text/route.js create mode 100644 packages/dd-trace/test/appsec/next/app-dir/app/api/test/route.js create mode 100644 packages/dd-trace/test/appsec/next/app-dir/app/layout.js create mode 100644 packages/dd-trace/test/appsec/next/app-dir/app/page.js create mode 100644 packages/dd-trace/test/appsec/next/app-dir/appsec-rules.json create mode 100644 packages/dd-trace/test/appsec/next/app-dir/datadog.js create mode 100644 packages/dd-trace/test/appsec/next/app-dir/jsconfig.json create mode 100644 packages/dd-trace/test/appsec/next/app-dir/next.config.js create mode 100644 packages/dd-trace/test/appsec/next/pages-dir/appsec-rules.json create mode 100644 packages/dd-trace/test/appsec/next/pages-dir/datadog.js create mode 100644 packages/dd-trace/test/appsec/next/pages-dir/naming.js create mode 100644 packages/dd-trace/test/appsec/next/pages-dir/next.config.js create mode 100644 packages/dd-trace/test/appsec/next/pages-dir/pages/api/test/index.js create mode 100644 packages/dd-trace/test/appsec/next/pages-dir/public/test.txt create mode 100644 packages/dd-trace/test/appsec/next/pages-dir/server.js diff --git a/.github/workflows/appsec.yml b/.github/workflows/appsec.yml index 7f49b46fe7a..68a55a81e64 100644 --- a/.github/workflows/appsec.yml +++ b/.github/workflows/appsec.yml @@ -146,3 +146,28 @@ jobs: - uses: ./.github/actions/node/latest - run: yarn test:appsec:plugins:ci - uses: codecov/codecov-action@v2 + + next: + strategy: + matrix: + node-version: [16] + range: ['>=9.5 <11.1', '>=11.1 <13.2'] + include: + - node-version: 18 + range: '>=13.2' + runs-on: ubuntu-latest + env: + PLUGINS: next + RANGE: ${{ matrix.range }} + steps: + - uses: actions/checkout@v2 + - uses: ./.github/actions/testagent/start + - uses: ./.github/actions/node/setup + - uses: actions/setup-node@v3 + with: + node-version: ${{ matrix.node-version }} + - run: yarn install + - run: yarn test:appsec:plugins:ci + - if: always() + uses: ./.github/actions/testagent/logs + - uses: codecov/codecov-action@v2 diff --git 
a/.gitignore b/.gitignore index 6b1c9d5f579..6835a6dd270 100644 --- a/.gitignore +++ b/.gitignore @@ -121,3 +121,6 @@ acmeair-nodejs packages/datadog-plugin-next/test/package.json packages/datadog-plugin-next/test/node_modules packages/datadog-plugin-next/test/yarn.lock +packages/dd-trace/test/appsec/next/*/package.json +packages/dd-trace/test/appsec/next/*/node_modules +packages/dd-trace/test/appsec/next/*/yarn.lock diff --git a/packages/datadog-instrumentations/src/body-parser.js b/packages/datadog-instrumentations/src/body-parser.js index ae2dc94adc0..a73c377ba9a 100644 --- a/packages/datadog-instrumentations/src/body-parser.js +++ b/packages/datadog-instrumentations/src/body-parser.js @@ -10,8 +10,9 @@ function publishRequestBodyAndNext (req, res, next) { return function () { if (bodyParserReadCh.hasSubscribers && req) { const abortController = new AbortController() + const body = req.body - bodyParserReadCh.publish({ req, res, abortController }) + bodyParserReadCh.publish({ req, res, body, abortController }) if (abortController.signal.aborted) return } diff --git a/packages/datadog-instrumentations/src/express.js b/packages/datadog-instrumentations/src/express.js index ffd7d4e2230..b07c38a42fe 100644 --- a/packages/datadog-instrumentations/src/express.js +++ b/packages/datadog-instrumentations/src/express.js @@ -33,8 +33,9 @@ function publishQueryParsedAndNext (req, res, next) { return function () { if (queryParserReadCh.hasSubscribers && req) { const abortController = new AbortController() + const query = req.query - queryParserReadCh.publish({ req, res, abortController }) + queryParserReadCh.publish({ req, res, query, abortController }) if (abortController.signal.aborted) return } diff --git a/packages/datadog-instrumentations/src/next.js b/packages/datadog-instrumentations/src/next.js index 471d1111ece..8f9d2d52959 100644 --- a/packages/datadog-instrumentations/src/next.js +++ b/packages/datadog-instrumentations/src/next.js @@ -10,6 +10,8 @@ const startChannel = channel('apm:next:request:start') const finishChannel = channel('apm:next:request:finish') const errorChannel = channel('apm:next:request:error') const pageLoadChannel = channel('apm:next:page:load') +const bodyParsedChannel = channel('apm:next:body-parsed') +const queryParsedChannel = channel('apm:next:query-parsed') const requests = new WeakSet() @@ -212,3 +214,41 @@ addHook({ return nextServer }) + +addHook({ + name: 'next', + versions: ['>=13'], + file: 'dist/server/web/spec-extension/request.js' +}, request => { + const nextUrlDescriptor = Object.getOwnPropertyDescriptor(request.NextRequest.prototype, 'nextUrl') + shimmer.wrap(nextUrlDescriptor, 'get', function (originalGet) { + return function wrappedGet () { + const nextUrl = originalGet.apply(this, arguments) + if (queryParsedChannel.hasSubscribers) { + const query = {} + for (const key of nextUrl.searchParams.keys()) { + if (!query[key]) { + query[key] = nextUrl.searchParams.getAll(key) + } + } + + queryParsedChannel.publish({ query }) + } + return nextUrl + } + }) + + Object.defineProperty(request.NextRequest.prototype, 'nextUrl', nextUrlDescriptor) + + shimmer.massWrap(request.NextRequest.prototype, ['text', 'json'], function (originalMethod) { + return async function wrappedJson () { + const body = await originalMethod.apply(this, arguments) + bodyParsedChannel.publish({ + body + }) + return body + } + }) + + return request +}) diff --git a/packages/dd-trace/src/appsec/channels.js b/packages/dd-trace/src/appsec/channels.js index b2058f04482..26d3dbb0355 100644 
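The two channels published above, 'apm:next:body-parsed' and 'apm:next:query-parsed', carry already-parsed values rather than the raw request. A minimal consumer sketch, assuming only Node's built-in diagnostics_channel (the tracer itself wires these into the AppSec subscriptions added to packages/dd-trace/src/appsec/index.js below):

const dc = require('diagnostics_channel')

dc.channel('apm:next:body-parsed').subscribe(({ body }) => {
  // body is whatever NextRequest#json() or NextRequest#text() resolved to
  console.log('parsed body', body)
})

dc.channel('apm:next:query-parsed').subscribe(({ query }) => {
  // query maps each search param name to the array of all of its values,
  // e.g. ?param[]=a&param[]=b becomes { 'param[]': ['a', 'b'] }
  console.log('parsed query', query)
})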
--- a/packages/dd-trace/src/appsec/channels.js +++ b/packages/dd-trace/src/appsec/channels.js @@ -11,5 +11,7 @@ module.exports = { incomingHttpRequestEnd: dc.channel('dd-trace:incomingHttpRequestEnd'), passportVerify: dc.channel('datadog:passport:verify:finish'), queryParser: dc.channel('datadog:query:read:finish'), - setCookieChannel: dc.channel('datadog:iast:set-cookie') + setCookieChannel: dc.channel('datadog:iast:set-cookie'), + nextBodyParsed: dc.channel('apm:next:body-parsed'), + nextQueryParsed: dc.channel('apm:next:query-parsed') } diff --git a/packages/dd-trace/src/appsec/index.js b/packages/dd-trace/src/appsec/index.js index 46d95f0f38c..e68bed40127 100644 --- a/packages/dd-trace/src/appsec/index.js +++ b/packages/dd-trace/src/appsec/index.js @@ -10,7 +10,9 @@ const { incomingHttpRequestStart, incomingHttpRequestEnd, passportVerify, - queryParser + queryParser, + nextBodyParsed, + nextQueryParsed } = require('./channels') const waf = require('./waf') const addresses = require('./addresses') @@ -43,6 +45,8 @@ function enable (_config) { incomingHttpRequestStart.subscribe(incomingHttpStartTranslator) incomingHttpRequestEnd.subscribe(incomingHttpEndTranslator) bodyParser.subscribe(onRequestBodyParsed) + nextBodyParsed.subscribe(onRequestBodyParsed) + nextQueryParsed.subscribe(onRequestQueryParsed) queryParser.subscribe(onRequestQueryParsed) cookieParser.subscribe(onRequestCookieParser) graphqlFinishExecute.subscribe(onGraphqlFinishExecute) @@ -117,6 +121,11 @@ function incomingHttpEndTranslator ({ req, res }) { payload[addresses.HTTP_INCOMING_COOKIES] = req.cookies } + // TODO: no need to analyze it if it was already done by the body-parser hook + if (req.query !== undefined && req.query !== null) { + payload[addresses.HTTP_INCOMING_QUERY] = req.query + } + waf.run(payload, req) waf.disposeContext(req) @@ -124,38 +133,48 @@ function incomingHttpEndTranslator ({ req, res }) { Reporter.finishRequest(req, res) } -function onRequestBodyParsed ({ req, res, abortController }) { +function onRequestBodyParsed ({ req, res, body, abortController }) { + if (body === undefined || body === null) return + + if (!req) { + const store = storage.getStore() + req = store?.req + } + const rootSpan = web.root(req) if (!rootSpan) return - if (req.body === undefined || req.body === null) return - const results = waf.run({ - [addresses.HTTP_INCOMING_BODY]: req.body + [addresses.HTTP_INCOMING_BODY]: body }, req) handleResults(results, req, res, rootSpan, abortController) } -function onRequestQueryParsed ({ req, res, abortController }) { +function onRequestQueryParsed ({ req, res, query, abortController }) { + if (!query || typeof query !== 'object') return + + if (!req) { + const store = storage.getStore() + req = store?.req + } + const rootSpan = web.root(req) if (!rootSpan) return - if (!req.query || typeof req.query !== 'object') return - const results = waf.run({ - [addresses.HTTP_INCOMING_QUERY]: req.query + [addresses.HTTP_INCOMING_QUERY]: query }, req) handleResults(results, req, res, rootSpan, abortController) } function onRequestCookieParser ({ req, res, abortController, cookies }) { + if (!cookies || typeof cookies !== 'object') return + const rootSpan = web.root(req) if (!rootSpan) return - if (!cookies || typeof cookies !== 'object') return - const results = waf.run({ [addresses.HTTP_INCOMING_COOKIES]: cookies }, req) diff --git a/packages/dd-trace/test/appsec/index.next.plugin.spec.js b/packages/dd-trace/test/appsec/index.next.plugin.spec.js new file mode 100644 index 00000000000..560b55eb7c8 
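Before the test file that exercises this, a rough sketch of the payload the new subscribers end up handing to the WAF for the query-string case (the address string is the one asserted on in index.spec.js above; the values follow the test request below):

// For GET /api/test?param[]=safe&param[]=testattack the subscriber roughly runs
// waf.run(payload, req) with:
const payload = {
  'server.request.query': { 'param[]': ['safe', 'testattack'] }
}
// The bundled appsec-rules.json phrase_matches 'testattack' on this address,
// which is what produces the '_dd.appsec.json' span tag the tests poll for.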
--- /dev/null +++ b/packages/dd-trace/test/appsec/index.next.plugin.spec.js @@ -0,0 +1,294 @@ +'use strict' + +const { spawn, execSync } = require('child_process') +const { cpSync, mkdirSync, rmdirSync, unlinkSync } = require('fs') +const getPort = require('get-port') +const axios = require('axios') +const { writeFileSync } = require('fs') +const { satisfies } = require('semver') +const path = require('path') + +const { DD_MAJOR } = require('../../../../version') +const agent = require('../plugins/agent') + +describe('test suite', () => { + let server + let port + + const satisfiesStandalone = version => satisfies(version, '>=12.0.0') + + withVersions('next', 'next', DD_MAJOR >= 4 && '>=11', version => { + const realVersion = require(`${__dirname}/../../../../versions/next@${version}`).version() + + function initApp (appName) { + const appDir = path.join(__dirname, 'next', appName) + + before(async function () { + this.timeout(120 * 1000) // Webpack is very slow and builds on every test run + + const cwd = appDir + + const pkg = require(`${__dirname}/../../../../versions/next@${version}/package.json`) + + if (realVersion.startsWith('10')) { + return this.skip() // TODO: Figure out why 10.x tests fail. + } + delete pkg.workspaces + + // builds fail for next.js 9.5 using node 14 due to webpack issues + // note that webpack version cannot be set in v9.5 in next.config.js so we do it here instead + // the link below highlights the initial support for webpack 5 (used to fix this issue) in next.js 9.5 + // https://nextjs.org/blog/next-9-5#webpack-5-support-beta + if (realVersion.startsWith('9')) pkg.resolutions = { webpack: '^5.0.0' } + + writeFileSync(`${appDir}/package.json`, JSON.stringify(pkg, null, 2)) + + // installing here for standalone purposes, copying `nodules` above was not generating the server file properly + // if there is a way to re-use nodules from somewhere in the versions folder, this `execSync` will be reverted + execSync('yarn install', { cwd }) + + // building in-process makes tests fail for an unknown reason + execSync('yarn exec next build', { + cwd, + env: { + ...process.env, + version + }, + stdio: ['pipe', 'ignore', 'pipe'] + }) + + if (satisfiesStandalone(realVersion)) { + // copy public and static files to the `standalone` folder + // const publicOrigin = `${appDir}/public` + const publicDestination = `${appDir}/.next/standalone/public` + const rulesFileOrigin = `${appDir}/appsec-rules.json` + const rulesFileDestination = `${appDir}/.next/standalone/appsec-rules.json` + + mkdirSync(publicDestination) + cpSync(rulesFileOrigin, rulesFileDestination) + } + }) + + after(function () { + this.timeout(5000) + + const files = [ + 'package.json', + 'yarn.lock' + ] + const filePaths = files.map(file => `${appDir}/${file}`) + filePaths.forEach(path => { + unlinkSync(path) + }) + + const dirs = [ + 'node_modules', + '.next' + ] + const dirPaths = dirs.map(file => `${appDir}/${file}`) + dirPaths.forEach(path => { + rmdirSync(path, { recursive: true, force: true }) + }) + }) + } + + const startServer = ({ appName, serverPath }, schemaVersion = 'v0', defaultToGlobalService = false) => { + const appDir = path.join(__dirname, 'next', appName) + + before(async () => { + port = await getPort() + + return agent.load('next') + }) + + before(function (done) { + this.timeout(40000) + const cwd = appDir + + server = spawn('node', [serverPath], { + cwd, + env: { + ...process.env, + VERSION: version, + PORT: port, + DD_TRACE_AGENT_PORT: agent.server.address().port, + 
DD_TRACE_SPAN_ATTRIBUTE_SCHEMA: schemaVersion, + DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED: defaultToGlobalService, + NODE_OPTIONS: `--require ${appDir}/datadog.js`, + HOSTNAME: '127.0.0.1' + } + }) + + server.once('error', done) + server.stdout.once('data', () => { + done() + }) + server.stderr.on('data', chunk => process.stderr.write(chunk)) + server.stdout.on('data', chunk => process.stdout.write(chunk)) + }) + + after(async function () { + this.timeout(5000) + + server.kill() + + await agent.close({ ritmReset: false }) + }) + } + + const tests = [ + { + appName: 'pages-dir', + serverPath: 'server' + } + ] + + if (satisfies(realVersion, '>=13.2')) { + tests.push({ + appName: 'app-dir', + serverPath: '.next/standalone/server.js' + }) + } + + tests.forEach(({ appName, serverPath }) => { + describe(`should detect threats in ${appName}`, () => { + initApp(appName) + + startServer({ appName, serverPath }) + + it('in request body', function (done) { + this.timeout(5000) + + function findBodyThreat (traces) { + let attackFound = false + + traces.forEach(trace => { + trace.forEach(span => { + if (span.meta['_dd.appsec.json']) { + attackFound = true + } + }) + }) + + if (attackFound) { + agent.unsubscribe(findBodyThreat) + done() + } + } + + agent.subscribe(findBodyThreat) + axios + .post(`http://127.0.0.1:${port}/api/test`, { + key: 'testattack' + }).catch(e => { done(e) }) + }) + + if (appName === 'app-dir') { + it('in request body with .text() function', function (done) { + this.timeout(5000) + + function findBodyThreat (traces) { + let attackFound = false + + traces.forEach(trace => { + trace.forEach(span => { + if (span.meta['_dd.appsec.json']) { + attackFound = true + } + }) + }) + + if (attackFound) { + agent.unsubscribe(findBodyThreat) + done() + } + } + + agent.subscribe(findBodyThreat) + axios + .post(`http://127.0.0.1:${port}/api/test-text`, { + key: 'testattack' + }).catch(e => { + done(e) + }) + }) + } + + it('in request query', function (done) { + this.timeout(5000) + + function findBodyThreat (traces) { + let attackFound = false + traces.forEach(trace => { + trace.forEach(span => { + if (span.meta['_dd.appsec.json']) { + attackFound = true + } + }) + }) + if (attackFound) { + agent.unsubscribe(findBodyThreat) + done() + } + } + + axios + .get(`http://127.0.0.1:${port}/api/test?param=testattack`) + .catch(e => { done(e) }) + + agent.subscribe(findBodyThreat) + }) + + it('in request query with array params, attack in the second item', function (done) { + this.timeout(5000) + + function findBodyThreat (traces) { + let attackFound = false + traces.forEach(trace => { + trace.forEach(span => { + if (span.meta['_dd.appsec.json']) { + attackFound = true + } + }) + }) + if (attackFound) { + agent.unsubscribe(findBodyThreat) + done() + } + } + + axios + .get(`http://127.0.0.1:${port}/api/test?param[]=safe¶m[]=testattack`) + .catch(e => { done(e) }) + + agent.subscribe(findBodyThreat) + }) + + it('in request query with array params, threat in the first item', function (done) { + this.timeout(5000) + + function findBodyThreat (traces) { + let attackFound = false + traces.forEach(trace => { + trace.forEach(span => { + if (span.meta['_dd.appsec.json']) { + attackFound = true + } + }) + }) + if (attackFound) { + agent.unsubscribe(findBodyThreat) + done() + } + } + + axios + .get(`http://127.0.0.1:${port}/api/test?param[]=testattack¶m[]=safe`) + .catch(e => { done(e) }) + + agent.subscribe(findBodyThreat) + }) + }) + }) + }) +}) diff --git a/packages/dd-trace/test/appsec/index.spec.js 
b/packages/dd-trace/test/appsec/index.spec.js index 33ba6e8cfd0..643ad0c3c6d 100644 --- a/packages/dd-trace/test/appsec/index.spec.js +++ b/packages/dd-trace/test/appsec/index.spec.js @@ -316,7 +316,7 @@ describe('AppSec Index', () => { remotePort: 8080 }, body: null, - query: 'string', + query: { queryKey: 'queryValue' }, route: {}, params: 'string', cookies: 'string' @@ -337,7 +337,8 @@ describe('AppSec Index', () => { expect(waf.run).to.have.been.calledOnceWithExactly({ 'server.response.status': 201, - 'server.response.headers.no_cookies': { 'content-type': 'application/json', 'content-lenght': 42 } + 'server.response.headers.no_cookies': { 'content-type': 'application/json', 'content-lenght': 42 }, + 'server.request.query': { queryKey: 'queryValue' } }, req) expect(Reporter.finishRequest).to.have.been.calledOnceWithExactly(req, res) @@ -391,7 +392,8 @@ describe('AppSec Index', () => { 'server.response.headers.no_cookies': { 'content-type': 'application/json', 'content-lenght': 42 }, 'server.request.body': { a: '1' }, 'server.request.path_params': { c: '3' }, - 'server.request.cookies': { d: '4', e: '5' } + 'server.request.cookies': { d: '4', e: '5' }, + 'server.request.query': { b: '2' } }, req) expect(Reporter.finishRequest).to.have.been.calledOnceWithExactly(req, res) }) @@ -449,10 +451,11 @@ describe('AppSec Index', () => { }) it('Should not block with body by default', () => { - req.body = { key: 'value' } + const body = { key: 'value' } + req.body = body sinon.stub(waf, 'run') - bodyParser.publish({ req, res, abortController }) + bodyParser.publish({ req, res, body, abortController }) expect(waf.run).to.have.been.calledOnceWith({ 'server.request.body': { key: 'value' } @@ -462,10 +465,11 @@ describe('AppSec Index', () => { }) it('Should block when it is detected as attack', () => { - req.body = { key: 'value' } + const body = { key: 'value' } + req.body = body sinon.stub(waf, 'run').returns(['block']) - bodyParser.publish({ req, res, abortController }) + bodyParser.publish({ req, res, body, abortController }) expect(waf.run).to.have.been.calledOnceWith({ 'server.request.body': { key: 'value' } @@ -525,10 +529,11 @@ describe('AppSec Index', () => { }) it('Should not block with query by default', () => { - req.query = { key: 'value' } + const query = { key: 'value' } + req.query = query sinon.stub(waf, 'run') - queryParser.publish({ req, res, abortController }) + queryParser.publish({ req, res, query, abortController }) expect(waf.run).to.have.been.calledOnceWith({ 'server.request.query': { key: 'value' } @@ -538,10 +543,11 @@ describe('AppSec Index', () => { }) it('Should block when it is detected as attack', () => { - req.query = { key: 'value' } + const query = { key: 'value' } + req.query = query sinon.stub(waf, 'run').returns(['block']) - queryParser.publish({ req, res, abortController }) + queryParser.publish({ req, res, query, abortController }) expect(waf.run).to.have.been.calledOnceWith({ 'server.request.query': { key: 'value' } diff --git a/packages/dd-trace/test/appsec/next/app-dir/app/api/test-text/route.js b/packages/dd-trace/test/appsec/next/app-dir/app/api/test-text/route.js new file mode 100644 index 00000000000..c5a72eda4d0 --- /dev/null +++ b/packages/dd-trace/test/appsec/next/app-dir/app/api/test-text/route.js @@ -0,0 +1,9 @@ +import { NextResponse } from 'next/server' +export async function POST (request) { + const body = await request.text() + return NextResponse.json({ + now: Date.now(), + cache: 'no-store', + data: body + }) +} diff --git 
a/packages/dd-trace/test/appsec/next/app-dir/app/api/test/route.js b/packages/dd-trace/test/appsec/next/app-dir/app/api/test/route.js new file mode 100644 index 00000000000..2184cdbfc03 --- /dev/null +++ b/packages/dd-trace/test/appsec/next/app-dir/app/api/test/route.js @@ -0,0 +1,16 @@ +import { NextResponse } from 'next/server' +export async function POST (request) { + const body = await request.json() + return NextResponse.json({ + now: Date.now(), + cache: 'no-store', + data: body + }) +} +export async function GET (request) { + return NextResponse.json({ + now: Date.now(), + cache: 'no-store', + data: request.nextUrl.searchParams + }) +} diff --git a/packages/dd-trace/test/appsec/next/app-dir/app/layout.js b/packages/dd-trace/test/appsec/next/app-dir/app/layout.js new file mode 100644 index 00000000000..f07a8fc923f --- /dev/null +++ b/packages/dd-trace/test/appsec/next/app-dir/app/layout.js @@ -0,0 +1,12 @@ +export const metadata = { + title: 'Create Next App', + description: 'Generated by create next app' +} + +export default function RootLayout ({ children }) { + return ( + + {children} + + ) +} diff --git a/packages/dd-trace/test/appsec/next/app-dir/app/page.js b/packages/dd-trace/test/appsec/next/app-dir/app/page.js new file mode 100644 index 00000000000..f637000ab61 --- /dev/null +++ b/packages/dd-trace/test/appsec/next/app-dir/app/page.js @@ -0,0 +1,5 @@ +export default function Home () { + return ( +
<div>test</div>
+ ) +} diff --git a/packages/dd-trace/test/appsec/next/app-dir/appsec-rules.json b/packages/dd-trace/test/appsec/next/app-dir/appsec-rules.json new file mode 100644 index 00000000000..afbcbc51fda --- /dev/null +++ b/packages/dd-trace/test/appsec/next/app-dir/appsec-rules.json @@ -0,0 +1,35 @@ +{ + "version": "2.2", + "metadata": { + "rules_version": "1.5.0" + }, + "rules": [ + { + "id": "test-rule-id-1", + "name": "test-rule-name-1", + "tags": { + "type": "security_scanner", + "category": "attack_attempt" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + } + ], + "list": [ + "testattack" + ] + }, + "operator": "phrase_match" + } + ], + "transformers": ["lowercase"] + } + ] +} diff --git a/packages/dd-trace/test/appsec/next/app-dir/datadog.js b/packages/dd-trace/test/appsec/next/app-dir/datadog.js new file mode 100644 index 00000000000..5e5978ba197 --- /dev/null +++ b/packages/dd-trace/test/appsec/next/app-dir/datadog.js @@ -0,0 +1,7 @@ +const path = require('path') +module.exports = require('../../../..').init({ + appsec: { + enabled: true, + rules: path.join(__dirname, 'appsec-rules.json') + } +}) diff --git a/packages/dd-trace/test/appsec/next/app-dir/jsconfig.json b/packages/dd-trace/test/appsec/next/app-dir/jsconfig.json new file mode 100644 index 00000000000..2a2e4b3bf8b --- /dev/null +++ b/packages/dd-trace/test/appsec/next/app-dir/jsconfig.json @@ -0,0 +1,7 @@ +{ + "compilerOptions": { + "paths": { + "@/*": ["./*"] + } + } +} diff --git a/packages/dd-trace/test/appsec/next/app-dir/next.config.js b/packages/dd-trace/test/appsec/next/app-dir/next.config.js new file mode 100644 index 00000000000..40ee170113f --- /dev/null +++ b/packages/dd-trace/test/appsec/next/app-dir/next.config.js @@ -0,0 +1,12 @@ +/** @type {import('next').NextConfig} */ +const nextConfig = { + eslint: { + ignoreDuringBuilds: true + }, + experimental: { + appDir: true + }, + output: 'standalone' +} + +module.exports = nextConfig diff --git a/packages/dd-trace/test/appsec/next/pages-dir/appsec-rules.json b/packages/dd-trace/test/appsec/next/pages-dir/appsec-rules.json new file mode 100644 index 00000000000..afbcbc51fda --- /dev/null +++ b/packages/dd-trace/test/appsec/next/pages-dir/appsec-rules.json @@ -0,0 +1,35 @@ +{ + "version": "2.2", + "metadata": { + "rules_version": "1.5.0" + }, + "rules": [ + { + "id": "test-rule-id-1", + "name": "test-rule-name-1", + "tags": { + "type": "security_scanner", + "category": "attack_attempt" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + } + ], + "list": [ + "testattack" + ] + }, + "operator": "phrase_match" + } + ], + "transformers": ["lowercase"] + } + ] +} diff --git a/packages/dd-trace/test/appsec/next/pages-dir/datadog.js b/packages/dd-trace/test/appsec/next/pages-dir/datadog.js new file mode 100644 index 00000000000..5e5978ba197 --- /dev/null +++ b/packages/dd-trace/test/appsec/next/pages-dir/datadog.js @@ -0,0 +1,7 @@ +const path = require('path') +module.exports = require('../../../..').init({ + appsec: { + enabled: true, + rules: path.join(__dirname, 'appsec-rules.json') + } +}) diff --git a/packages/dd-trace/test/appsec/next/pages-dir/naming.js b/packages/dd-trace/test/appsec/next/pages-dir/naming.js new file mode 100644 index 00000000000..8b2f1740410 --- /dev/null +++ b/packages/dd-trace/test/appsec/next/pages-dir/naming.js @@ -0,0 +1,19 @@ +const { resolveNaming } = 
require('../../dd-trace/test/plugins/helpers') + +const rawExpectedSchema = { + server: { + v0: { + serviceName: () => 'test', + opName: () => 'next.request' + }, + v1: { + serviceName: () => 'test', + opName: () => 'http.server.request' + } + } +} + +module.exports = { + rawExpectedSchema, + expectedSchema: resolveNaming(rawExpectedSchema) +} diff --git a/packages/dd-trace/test/appsec/next/pages-dir/next.config.js b/packages/dd-trace/test/appsec/next/pages-dir/next.config.js new file mode 100644 index 00000000000..f983146e9de --- /dev/null +++ b/packages/dd-trace/test/appsec/next/pages-dir/next.config.js @@ -0,0 +1,6 @@ +module.exports = { + eslint: { + ignoreDuringBuilds: true + }, + output: 'standalone' +} diff --git a/packages/dd-trace/test/appsec/next/pages-dir/pages/api/test/index.js b/packages/dd-trace/test/appsec/next/pages-dir/pages/api/test/index.js new file mode 100644 index 00000000000..538520f5eaf --- /dev/null +++ b/packages/dd-trace/test/appsec/next/pages-dir/pages/api/test/index.js @@ -0,0 +1,10 @@ +// Next.js API route support: https://nextjs.org/docs/api-routes/introduction + +export default async function POST (req, res) { + const body = req.body + res.status(200).json({ + cache: 'no-store', + data: body, + query: req.query + }) +} diff --git a/packages/dd-trace/test/appsec/next/pages-dir/public/test.txt b/packages/dd-trace/test/appsec/next/pages-dir/public/test.txt new file mode 100644 index 00000000000..9daeafb9864 --- /dev/null +++ b/packages/dd-trace/test/appsec/next/pages-dir/public/test.txt @@ -0,0 +1 @@ +test diff --git a/packages/dd-trace/test/appsec/next/pages-dir/server.js b/packages/dd-trace/test/appsec/next/pages-dir/server.js new file mode 100644 index 00000000000..673974ac988 --- /dev/null +++ b/packages/dd-trace/test/appsec/next/pages-dir/server.js @@ -0,0 +1,24 @@ +'use strict' + +const { PORT, HOSTNAME } = process.env + +const { createServer } = require('http') +const { parse } = require('url') +const next = require('next') // eslint-disable-line import/no-extraneous-dependencies + +const app = next({ dir: __dirname, dev: false, quiet: true, hostname: HOSTNAME }) +const handle = app.getRequestHandler() + +app.prepare().then(() => { + const server = createServer((req, res) => { + const parsedUrl = parse(req.url, true) + + if (parsedUrl.path === '/exit') { + server.close() + } else { + handle(req, res, parsedUrl) + } + }).listen(PORT, HOSTNAME, () => { + console.log(server.address()) // eslint-disable-line no-console + }) +}) From 90c1292cb1b8dbf0ec6c1f818d5568d8724a1b03 Mon Sep 17 00:00:00 2001 From: simon-id Date: Mon, 2 Oct 2023 10:34:54 +0200 Subject: [PATCH 004/147] Add NPM provenance signing (#3645) --- .github/workflows/release-3.yml | 4 +++- .github/workflows/release-dev.yml | 4 +++- .github/workflows/release-latest.yml | 4 +++- 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/.github/workflows/release-3.yml b/.github/workflows/release-3.yml index e3367eccebd..8061bcf81ce 100644 --- a/.github/workflows/release-3.yml +++ b/.github/workflows/release-3.yml @@ -13,6 +13,8 @@ jobs: publish: runs-on: ubuntu-latest environment: npm + permissions: + id-token: write env: NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} steps: @@ -20,7 +22,7 @@ jobs: - uses: actions/setup-node@v3 with: registry-url: 'https://registry.npmjs.org' - - run: npm publish --tag latest-node14 + - run: npm publish --tag latest-node14 --provenance - id: pkg run: | content=`cat ./package.json | tr '\n' ' '` diff --git a/.github/workflows/release-dev.yml 
b/.github/workflows/release-dev.yml index f080443f35e..7abf239b538 100644 --- a/.github/workflows/release-dev.yml +++ b/.github/workflows/release-dev.yml @@ -9,6 +9,8 @@ jobs: dev_release: runs-on: ubuntu-latest environment: npm + permissions: + id-token: write env: NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} steps: @@ -22,7 +24,7 @@ jobs: content=`cat ./package.json | tr '\n' ' '` echo "::set-output name=json::$content" - run: npm version --no-git-tag-version ${{ fromJson(steps.pkg.outputs.json).version }}-$(git rev-parse --short HEAD)+${{ github.run_id }}.${{ github.run_attempt }} - - run: npm publish --tag dev + - run: npm publish --tag dev --provenance - run: | git tag --force dev git push origin :refs/tags/dev diff --git a/.github/workflows/release-latest.yml b/.github/workflows/release-latest.yml index 3cc980235a1..613ed456864 100644 --- a/.github/workflows/release-latest.yml +++ b/.github/workflows/release-latest.yml @@ -13,6 +13,8 @@ jobs: publish: runs-on: ubuntu-latest environment: npm + permissions: + id-token: write env: NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} outputs: @@ -22,7 +24,7 @@ jobs: - uses: actions/setup-node@v3 with: registry-url: 'https://registry.npmjs.org' - - run: npm publish + - run: npm publish --provenance - id: pkg run: | content=`cat ./package.json | tr '\n' ' '` From e6ac93a2534ad18755a9c67968a7713da69be827 Mon Sep 17 00:00:00 2001 From: Carles Capell <107924659+CarlesDD@users.noreply.github.com> Date: Tue, 3 Oct 2023 10:45:27 +0200 Subject: [PATCH 005/147] Fix SQLi location when using knex [APPSEC-10688] (#3607) * Fix SQLi location when using knex * Remove onProgress wrapping from knex then * Rename asyncResource * Add spacing in code to improve readability * Avoid using shimmer to wrap then from knex raw result * Detect dialect for SQLi with knex * Instrument knex catch and asCallback --- .github/workflows/appsec.yml | 2 +- packages/datadog-instrumentations/src/knex.js | 63 ++++++- .../iast/analyzers/sql-injection-analyzer.js | 22 ++- .../resources/knex-sql-injection-methods.js | 44 +++++ .../resources/sql-injection-methods.js | 5 +- ...sql-injection-analyzer.knex.plugin.spec.js | 161 ++++++++++++++++++ .../analyzers/sql-injection-analyzer.spec.js | 29 +++- 7 files changed, 321 insertions(+), 5 deletions(-) create mode 100644 packages/dd-trace/test/appsec/iast/analyzers/resources/knex-sql-injection-methods.js create mode 100644 packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.knex.plugin.spec.js diff --git a/.github/workflows/appsec.yml b/.github/workflows/appsec.yml index 68a55a81e64..05bfdb8f257 100644 --- a/.github/workflows/appsec.yml +++ b/.github/workflows/appsec.yml @@ -80,7 +80,7 @@ jobs: - 5432:5432 env: PG_TEST_NATIVE: 'true' - PLUGINS: pg + PLUGINS: pg|knex SERVICES: postgres steps: - uses: actions/checkout@v2 diff --git a/packages/datadog-instrumentations/src/knex.js b/packages/datadog-instrumentations/src/knex.js index ba0d29c4d9e..1df88ba31a7 100644 --- a/packages/datadog-instrumentations/src/knex.js +++ b/packages/datadog-instrumentations/src/knex.js @@ -1,9 +1,12 @@ 'use strict' -const { addHook } = require('./helpers/instrument') +const { addHook, channel } = require('./helpers/instrument') const { wrapThen } = require('./helpers/promise') const shimmer = require('../../datadog-shimmer') +const startRawQueryCh = channel('datadog:knex:raw:start') +const finishRawQueryCh = channel('datadog:knex:raw:finish') + patch('lib/query/builder.js') patch('lib/raw.js') patch('lib/schema/builder.js') @@ -18,3 +21,61 @@ function patch 
(file) { return Builder }) } + +addHook({ + name: 'knex', + versions: ['>=2'], + file: 'lib/knex-builder/Knex.js' +}, Knex => { + shimmer.wrap(Knex.Client.prototype, 'raw', raw => function () { + if (!startRawQueryCh.hasSubscribers) { + return raw.apply(this, arguments) + } + + const sql = arguments[0] + + // Skip query done by Knex to get the value used for undefined + if (sql === 'DEFAULT') { + return raw.apply(this, arguments) + } + + function finish () { + finishRawQueryCh.publish() + } + + startRawQueryCh.publish({ sql, dialect: this.dialect }) + + const rawResult = raw.apply(this, arguments) + + shimmer.wrap(rawResult, 'then', originalThen => function () { + arguments[0] = wrapCallbackWithFinish(arguments[0], finish) + arguments[1] = wrapCallbackWithFinish(arguments[1], finish) + + const originalThenResult = originalThen.apply(this, arguments) + + shimmer.wrap(originalThenResult, 'catch', originalCatch => function () { + arguments[0] = wrapCallbackWithFinish(arguments[0], finish) + return originalCatch.apply(this, arguments) + }) + + return originalThenResult + }) + + shimmer.wrap(rawResult, 'asCallback', originalAsCallback => function () { + arguments[0] = wrapCallbackWithFinish(arguments[0], finish) + return originalAsCallback.apply(this, arguments) + }) + + return rawResult + }) + return Knex +}) + +function wrapCallbackWithFinish (callback, finish) { + if (typeof callback !== 'function') return callback + + return function () { + finish() + callback.apply(this, arguments) + } +} diff --git a/packages/dd-trace/src/appsec/iast/analyzers/sql-injection-analyzer.js b/packages/dd-trace/src/appsec/iast/analyzers/sql-injection-analyzer.js index d0406ec1266..a857839e175 100644 --- a/packages/dd-trace/src/appsec/iast/analyzers/sql-injection-analyzer.js +++ b/packages/dd-trace/src/appsec/iast/analyzers/sql-injection-analyzer.js @@ -8,7 +8,7 @@ const { getIastContext } = require('../iast-context') const { addVulnerability } = require('../vulnerability-reporter') const { getNodeModulesPaths } = require('../path-line') -const EXCLUDED_PATHS = getNodeModulesPaths('mysql', 'mysql2', 'sequelize', 'pg-pool') +const EXCLUDED_PATHS = getNodeModulesPaths('mysql', 'mysql2', 'sequelize', 'pg-pool', 'knex') class SqlInjectionAnalyzer extends InjectionAnalyzer { constructor () { @@ -31,6 +31,12 @@ class SqlInjectionAnalyzer extends InjectionAnalyzer { this.addSub('datadog:mysql:pool:query:start', ({ sql }) => this.getStoreAndAnalyze(sql, 'MYSQL')) this.addSub('datadog:mysql:pool:query:finish', () => this.returnToParentStore()) + + this.addSub('datadog:knex:raw:start', ({ sql, dialect: knexDialect }) => { + const dialect = this.normalizeKnexDialect(knexDialect) + this.getStoreAndAnalyze(sql, dialect) + }) + this.addSub('datadog:knex:raw:finish', () => this.returnToParentStore()) } getStoreAndAnalyze (query, dialect) { @@ -83,6 +89,20 @@ class SqlInjectionAnalyzer extends InjectionAnalyzer { _getExcludedPaths () { return EXCLUDED_PATHS } + + normalizeKnexDialect (knexDialect) { + if (knexDialect === 'postgresql') { + return 'POSTGRES' + } + + if (knexDialect === 'sqlite3') { + return 'SQLITE' + } + + if (typeof knexDialect === 'string') { + return knexDialect.toUpperCase() + } + } } module.exports = new SqlInjectionAnalyzer() diff --git a/packages/dd-trace/test/appsec/iast/analyzers/resources/knex-sql-injection-methods.js b/packages/dd-trace/test/appsec/iast/analyzers/resources/knex-sql-injection-methods.js new file mode 100644 index 00000000000..876d969280f --- /dev/null +++ 
b/packages/dd-trace/test/appsec/iast/analyzers/resources/knex-sql-injection-methods.js @@ -0,0 +1,44 @@ +'use strict' + +function executeKnexRawQuery (knex, sql) { + return knex.raw(sql) +} + +function executeKnexNestedRawQuery (knex, taintedSql, notTaintedSql) { + return knex.raw(notTaintedSql).then(() => { + knex.raw(taintedSql) + }) +} + +function executeKnexNestedRawQueryOnRejectedInThen (knex, taintedSql, sqlToFail) { + return knex.raw(sqlToFail).then( + () => {}, + () => { + knex.raw(taintedSql) + } + ) +} + +function executeKnexNestedRawQueryWitCatch (knex, taintedSql, sqlToFail) { + return knex.raw(sqlToFail) + .then( + () => {} + ) + .catch(() => { + knex.raw(taintedSql) + }) +} + +function executeKnexNestedRawQueryAsCallback (knex, taintedSql, sqlToFail, cb) { + knex.raw(sqlToFail).asCallback(() => { + knex.raw(taintedSql).asCallback(cb) + }) +} + +module.exports = { + executeKnexRawQuery, + executeKnexNestedRawQuery, + executeKnexNestedRawQueryOnRejectedInThen, + executeKnexNestedRawQueryWitCatch, + executeKnexNestedRawQueryAsCallback +} diff --git a/packages/dd-trace/test/appsec/iast/analyzers/resources/sql-injection-methods.js b/packages/dd-trace/test/appsec/iast/analyzers/resources/sql-injection-methods.js index bdef9db7899..58902882ea9 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/resources/sql-injection-methods.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/resources/sql-injection-methods.js @@ -8,4 +8,7 @@ function executeQueryWithCallback (sql, clientOrPool, cb) { return clientOrPool.query(sql, cb) } -module.exports = { executeQuery, executeQueryWithCallback } +module.exports = { + executeQuery, + executeQueryWithCallback +} diff --git a/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.knex.plugin.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.knex.plugin.spec.js new file mode 100644 index 00000000000..a5dddc6b888 --- /dev/null +++ b/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.knex.plugin.spec.js @@ -0,0 +1,161 @@ +'use strict' + +const fs = require('fs') +const os = require('os') +const path = require('path') +const semver = require('semver') +const { prepareTestServerForIast } = require('../utils') +const { storage } = require('../../../../../datadog-core') +const iastContextFunctions = require('../../../../src/appsec/iast/iast-context') +const { newTaintedString } = require('../../../../src/appsec/iast/taint-tracking/operations') +const vulnerabilityReporter = require('../../../../src/appsec/iast/vulnerability-reporter') + +describe('sql-injection-analyzer with knex', () => { + withVersions('knex', 'knex', knexVersion => { + if (!semver.satisfies(knexVersion, '>=2')) return + + withVersions('pg', 'pg', pgVersion => { + let knex + + prepareTestServerForIast('knex + pg', + (testThatRequestHasVulnerability, testThatRequestHasNoVulnerability) => { + const srcFilePath = path.join(__dirname, 'resources', 'knex-sql-injection-methods.js') + const dstFilePath = path.join(os.tmpdir(), 'knex-sql-injection-methods.js') + let queryMethods + + beforeEach(() => { + vulnerabilityReporter.clearCache() + + const Knex = require(`../../../../../../versions/knex@${knexVersion}`).get() + knex = Knex({ + client: 'pg', + connection: { + host: '127.0.0.1', + database: 'postgres', + user: 'postgres', + password: 'postgres' + } + }) + + fs.copyFileSync(srcFilePath, dstFilePath) + queryMethods = require(dstFilePath) + }) + + afterEach(() => { + knex.destroy() + fs.unlinkSync(dstFilePath) + }) + + 
describe('simple raw query', () => { + testThatRequestHasVulnerability(() => { + const store = storage.getStore() + const iastCtx = iastContextFunctions.getIastContext(store) + + let sql = 'SELECT 1' + sql = newTaintedString(iastCtx, sql, 'param', 'Request') + + return queryMethods.executeKnexRawQuery(knex, sql) + }, 'SQL_INJECTION', { + occurrences: 1, + location: { + path: 'knex-sql-injection-methods.js', + line: 4 + } + }) + + testThatRequestHasNoVulnerability(() => { + return knex.raw('SELECT 1') + }, 'SQL_INJECTION') + }) + + describe('nested raw query', () => { + testThatRequestHasVulnerability(() => { + const store = storage.getStore() + const iastCtx = iastContextFunctions.getIastContext(store) + + let taintedSql = 'SELECT 1' + taintedSql = newTaintedString(iastCtx, taintedSql, 'param', 'Request') + + const notTaintedSql = 'SELECT 1' + + return queryMethods.executeKnexNestedRawQuery(knex, taintedSql, notTaintedSql) + }, 'SQL_INJECTION', { + occurrences: 1, + location: { + path: 'knex-sql-injection-methods.js', + line: 9 + } + }) + }) + + describe('nested raw query - onRejected as then argument', () => { + testThatRequestHasVulnerability(() => { + const store = storage.getStore() + const iastCtx = iastContextFunctions.getIastContext(store) + + let taintedSql = 'SELECT 1' + taintedSql = newTaintedString(iastCtx, taintedSql, 'param', 'Request') + + const sqlToFail = 'SELECT * FROM NON_EXISTSING_TABLE' + + return queryMethods.executeKnexNestedRawQueryOnRejectedInThen(knex, taintedSql, sqlToFail) + }, 'SQL_INJECTION', { + occurrences: 1, + location: { + path: 'knex-sql-injection-methods.js', + line: 17 + } + }) + }) + + describe('nested raw query - with catch', () => { + testThatRequestHasVulnerability(() => { + const store = storage.getStore() + const iastCtx = iastContextFunctions.getIastContext(store) + + let taintedSql = 'SELECT 1' + taintedSql = newTaintedString(iastCtx, taintedSql, 'param', 'Request') + + const sqlToFail = 'SELECT * FROM NON_EXISTSING_TABLE' + + return queryMethods.executeKnexNestedRawQueryWitCatch(knex, taintedSql, sqlToFail) + }, 'SQL_INJECTION', { + occurrences: 1, + location: { + path: 'knex-sql-injection-methods.js', + line: 28 + } + }) + }) + + describe('nested raw query - asCallback', () => { + testThatRequestHasVulnerability(() => { + return new Promise((resolve, reject) => { + const store = storage.getStore() + const iastCtx = iastContextFunctions.getIastContext(store) + + let taintedSql = 'SELECT 1' + taintedSql = newTaintedString(iastCtx, taintedSql, 'param', 'Request') + + const sqlToFail = 'SELECT * FROM NON_EXISTSING_TABLE' + + queryMethods.executeKnexNestedRawQueryAsCallback(knex, taintedSql, sqlToFail, (err, result) => { + if (err) { + reject(err) + } else { + resolve(result) + } + }) + }) + }, 'SQL_INJECTION', { + occurrences: 1, + location: { + path: 'knex-sql-injection-methods.js', + line: 34 + } + }) + }) + }) + }) + }) +}) diff --git a/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.spec.js index b8d72613375..e73e96d78cf 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.spec.js @@ -29,7 +29,7 @@ describe('sql-injection-analyzer', () => { sqlInjectionAnalyzer.configure(true) it('should subscribe to mysql, mysql2 and pg start query channel', () => { - expect(sqlInjectionAnalyzer._subscriptions).to.have.lengthOf(9) + 
expect(sqlInjectionAnalyzer._subscriptions).to.have.lengthOf(11) expect(sqlInjectionAnalyzer._subscriptions[0]._channel.name).to.equals('apm:mysql:query:start') expect(sqlInjectionAnalyzer._subscriptions[1]._channel.name).to.equals('apm:mysql2:query:start') expect(sqlInjectionAnalyzer._subscriptions[2]._channel.name).to.equals('apm:pg:query:start') @@ -39,6 +39,8 @@ describe('sql-injection-analyzer', () => { expect(sqlInjectionAnalyzer._subscriptions[6]._channel.name).to.equals('datadog:pg:pool:query:finish') expect(sqlInjectionAnalyzer._subscriptions[7]._channel.name).to.equals('datadog:mysql:pool:query:start') expect(sqlInjectionAnalyzer._subscriptions[8]._channel.name).to.equals('datadog:mysql:pool:query:finish') + expect(sqlInjectionAnalyzer._subscriptions[9]._channel.name).to.equals('datadog:knex:raw:start') + expect(sqlInjectionAnalyzer._subscriptions[10]._channel.name).to.equals('datadog:knex:raw:finish') }) it('should not detect vulnerability when no query', () => { @@ -162,4 +164,29 @@ describe('sql-injection-analyzer', () => { expect(analyze).to.be.calledOnceWith('SELECT 1') }) }) + + describe('knex dialects', () => { + const sqlInjectionAnalyzer = require('../../../../src/appsec/iast/analyzers/sql-injection-analyzer') + + const knexDialects = { + 'mssql': 'MSSQL', + 'oracle': 'ORACLE', + 'mysql': 'MYSQL', + 'redshift': 'REDSHIFT', + 'postgresql': 'POSTGRES', + 'sqlite3': 'SQLITE' + } + + Object.keys(knexDialects).forEach((knexDialect) => { + it(`should normalize knex dialect ${knexDialect} to uppercase`, () => { + const normalizedDialect = sqlInjectionAnalyzer.normalizeKnexDialect(knexDialect) + expect(normalizedDialect).to.equals(knexDialects[knexDialect]) + }) + }) + + it('should not fail when normalizing a non string knex dialect', () => { + const normalizedDialect = sqlInjectionAnalyzer.normalizeKnexDialect() + expect(normalizedDialect).to.be.undefined + }) + }) }) From 5bb45e28576ea9db19c723e49e7cd6b0efbc78a1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Tue, 3 Oct 2023 10:55:34 +0200 Subject: [PATCH 006/147] [ci-visibility] Fix unshallow logic (#3668) --- packages/dd-trace/src/plugins/util/git.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/dd-trace/src/plugins/util/git.js b/packages/dd-trace/src/plugins/util/git.js index eb086f2205b..6746ebd9343 100644 --- a/packages/dd-trace/src/plugins/util/git.js +++ b/packages/dd-trace/src/plugins/util/git.js @@ -75,7 +75,7 @@ function unshallowRepository () { execFileSync('git', [ ...baseGitOptions, revParseHead - ]) + ], { stdio: 'pipe' }) } catch (e) { // If the local HEAD is a commit that has not been pushed to the remote, the above command will fail. log.error(e) @@ -84,7 +84,7 @@ function unshallowRepository () { execFileSync('git', [ ...baseGitOptions, upstreamRemote - ]) + ], { stdio: 'pipe' }) } catch (e) { // If the CI is working on a detached HEAD or branch tracking hasn’t been set up, the above command will fail. 
log.error(e) From 6985022bd586f7b17b23620b924509dd652034cd Mon Sep 17 00:00:00 2001 From: Carles Capell <107924659+CarlesDD@users.noreply.github.com> Date: Tue, 3 Oct 2023 11:28:48 +0200 Subject: [PATCH 007/147] Change source origin from HTTP_REQUEST_PATH to HTTP_REQUEST_URI (#3644) --- .../iast/analyzers/unvalidated-redirect-analyzer.js | 6 +++--- .../dd-trace/src/appsec/iast/taint-tracking/plugin.js | 8 ++++---- .../src/appsec/iast/taint-tracking/source-types.js | 3 ++- .../analyzers/unvalidated-redirect-analyzer.spec.js | 6 +++--- .../test/appsec/iast/taint-tracking/plugin.spec.js | 8 ++++---- .../sources/taint-tracking.express.plugin.spec.js | 10 +++++----- .../dd-trace/test/appsec/iast/telemetry/index.spec.js | 2 +- 7 files changed, 22 insertions(+), 21 deletions(-) diff --git a/packages/dd-trace/src/appsec/iast/analyzers/unvalidated-redirect-analyzer.js b/packages/dd-trace/src/appsec/iast/analyzers/unvalidated-redirect-analyzer.js index 5cb27659945..2895e44876a 100644 --- a/packages/dd-trace/src/appsec/iast/analyzers/unvalidated-redirect-analyzer.js +++ b/packages/dd-trace/src/appsec/iast/analyzers/unvalidated-redirect-analyzer.js @@ -6,8 +6,8 @@ const { getNodeModulesPaths } = require('../path-line') const { getRanges } = require('../taint-tracking/operations') const { HTTP_REQUEST_HEADER_VALUE, - HTTP_REQUEST_PATH, - HTTP_REQUEST_PATH_PARAM + HTTP_REQUEST_PATH_PARAM, + HTTP_REQUEST_URI } = require('../taint-tracking/source-types') const EXCLUDED_PATHS = getNodeModulesPaths('express/lib/response.js') @@ -56,7 +56,7 @@ class UnvalidatedRedirectAnalyzer extends InjectionAnalyzer { } _isUrl (range) { - return range.iinfo.type === HTTP_REQUEST_PATH + return range.iinfo.type === HTTP_REQUEST_URI } _getExcludedPaths () { diff --git a/packages/dd-trace/src/appsec/iast/taint-tracking/plugin.js b/packages/dd-trace/src/appsec/iast/taint-tracking/plugin.js index b7fd1b6cec2..29cbb6526e1 100644 --- a/packages/dd-trace/src/appsec/iast/taint-tracking/plugin.js +++ b/packages/dd-trace/src/appsec/iast/taint-tracking/plugin.js @@ -11,8 +11,8 @@ const { HTTP_REQUEST_HEADER_VALUE, HTTP_REQUEST_HEADER_NAME, HTTP_REQUEST_PARAMETER, - HTTP_REQUEST_PATH, - HTTP_REQUEST_PATH_PARAM + HTTP_REQUEST_PATH_PARAM, + HTTP_REQUEST_URI } = require('./source-types') class TaintTrackingPlugin extends SourceIastPlugin { @@ -93,9 +93,9 @@ class TaintTrackingPlugin extends SourceIastPlugin { taintUrl (req, iastContext) { this.execSource({ handler: function () { - req.url = newTaintedString(iastContext, req.url, 'req.url', HTTP_REQUEST_PATH) + req.url = newTaintedString(iastContext, req.url, HTTP_REQUEST_URI, HTTP_REQUEST_URI) }, - tag: [HTTP_REQUEST_PATH], + tag: [HTTP_REQUEST_URI], iastContext }) } diff --git a/packages/dd-trace/src/appsec/iast/taint-tracking/source-types.js b/packages/dd-trace/src/appsec/iast/taint-tracking/source-types.js index aad90ef6ad3..2a3739515d8 100644 --- a/packages/dd-trace/src/appsec/iast/taint-tracking/source-types.js +++ b/packages/dd-trace/src/appsec/iast/taint-tracking/source-types.js @@ -8,5 +8,6 @@ module.exports = { HTTP_REQUEST_HEADER_VALUE: 'http.request.header', HTTP_REQUEST_PARAMETER: 'http.request.parameter', HTTP_REQUEST_PATH: 'http.request.path', - HTTP_REQUEST_PATH_PARAM: 'http.request.path.parameter' + HTTP_REQUEST_PATH_PARAM: 'http.request.path.parameter', + HTTP_REQUEST_URI: 'http.request.uri' } diff --git a/packages/dd-trace/test/appsec/iast/analyzers/unvalidated-redirect-analyzer.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/unvalidated-redirect-analyzer.spec.js 
index b80e6d35351..c941f007adb 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/unvalidated-redirect-analyzer.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/unvalidated-redirect-analyzer.spec.js @@ -6,8 +6,8 @@ const overheadController = require('../../../../src/appsec/iast/overhead-control const { HTTP_REQUEST_HEADER_VALUE, HTTP_REQUEST_PARAMETER, - HTTP_REQUEST_PATH, - HTTP_REQUEST_PATH_PARAM + HTTP_REQUEST_PATH_PARAM, + HTTP_REQUEST_URI } = require('../../../../src/appsec/iast/taint-tracking/source-types') describe('unvalidated-redirect-analyzer', () => { @@ -46,7 +46,7 @@ describe('unvalidated-redirect-analyzer', () => { } const URL_RANGE = { iinfo: { - type: HTTP_REQUEST_PATH, + type: HTTP_REQUEST_URI, parameterName: 'path' } } diff --git a/packages/dd-trace/test/appsec/iast/taint-tracking/plugin.spec.js b/packages/dd-trace/test/appsec/iast/taint-tracking/plugin.spec.js index b464b9f2f60..68964910c89 100644 --- a/packages/dd-trace/test/appsec/iast/taint-tracking/plugin.spec.js +++ b/packages/dd-trace/test/appsec/iast/taint-tracking/plugin.spec.js @@ -9,8 +9,8 @@ const { HTTP_REQUEST_COOKIE_NAME, HTTP_REQUEST_HEADER_NAME, HTTP_REQUEST_HEADER_VALUE, - HTTP_REQUEST_PATH, - HTTP_REQUEST_PATH_PARAM + HTTP_REQUEST_PATH_PARAM, + HTTP_REQUEST_URI } = require('../../../../src/appsec/iast/taint-tracking/source-types') const middlewareNextChannel = dc.channel('apm:express:middleware:next') @@ -251,8 +251,8 @@ describe('IAST Taint tracking plugin', () => { expect(taintTrackingOperations.newTaintedString).to.be.calledOnceWith( iastContext, req.url, - 'req.url', - HTTP_REQUEST_PATH + HTTP_REQUEST_URI, + HTTP_REQUEST_URI ) }) }) diff --git a/packages/dd-trace/test/appsec/iast/taint-tracking/sources/taint-tracking.express.plugin.spec.js b/packages/dd-trace/test/appsec/iast/taint-tracking/sources/taint-tracking.express.plugin.spec.js index 020798b0299..7e1626a2b6f 100644 --- a/packages/dd-trace/test/appsec/iast/taint-tracking/sources/taint-tracking.express.plugin.spec.js +++ b/packages/dd-trace/test/appsec/iast/taint-tracking/sources/taint-tracking.express.plugin.spec.js @@ -10,11 +10,11 @@ const iast = require('../../../../../src/appsec/iast') const iastContextFunctions = require('../../../../../src/appsec/iast/iast-context') const { isTainted, getRanges } = require('../../../../../src/appsec/iast/taint-tracking/operations') const { - HTTP_REQUEST_PATH, - HTTP_REQUEST_PATH_PARAM + HTTP_REQUEST_PATH_PARAM, + HTTP_REQUEST_URI } = require('../../../../../src/appsec/iast/taint-tracking/source-types') -describe('Path sourcing with express', () => { +describe('URI sourcing with express', () => { let express let appListener @@ -47,7 +47,7 @@ describe('Path sourcing with express', () => { iast.disable() }) - it('should taint path', done => { + it('should taint uri', done => { const app = express() app.get('/path/*', (req, res) => { const store = storage.getStore() @@ -55,7 +55,7 @@ describe('Path sourcing with express', () => { const isPathTainted = isTainted(iastContext, req.url) expect(isPathTainted).to.be.true const taintedPathValueRanges = getRanges(iastContext, req.url) - expect(taintedPathValueRanges[0].iinfo.type).to.be.equal(HTTP_REQUEST_PATH) + expect(taintedPathValueRanges[0].iinfo.type).to.be.equal(HTTP_REQUEST_URI) res.status(200).send() }) diff --git a/packages/dd-trace/test/appsec/iast/telemetry/index.spec.js b/packages/dd-trace/test/appsec/iast/telemetry/index.spec.js index 250e9d5d2c8..8fcb9752f3d 100644 --- a/packages/dd-trace/test/appsec/iast/telemetry/index.spec.js +++ 
b/packages/dd-trace/test/appsec/iast/telemetry/index.spec.js @@ -232,7 +232,7 @@ describe('Telemetry', () => { it('should have url source execution metric', (done) => { agent .use(traces => { - expect(traces[0][0].metrics['_dd.iast.telemetry.executed.source.http_request_path']).to.be.equal(1) + expect(traces[0][0].metrics['_dd.iast.telemetry.executed.source.http_request_uri']).to.be.equal(1) }) .then(done) .catch(done) From 3ed12f46cdf9debe5f02b0043b7ac061d55694b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Tue, 3 Oct 2023 12:20:59 +0200 Subject: [PATCH 008/147] Fix flakiness in `serverless-performance` (#3672) --- .github/workflows/serverless-performance.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/serverless-performance.yml b/.github/workflows/serverless-performance.yml index 2eb26065864..47c330ddc4f 100644 --- a/.github/workflows/serverless-performance.yml +++ b/.github/workflows/serverless-performance.yml @@ -22,10 +22,13 @@ jobs: with: repository: DataDog/datadog-lambda-js path: datadog-lambda-js + - name: Install node-gyp + run: | + yarn global add node-gyp - name: Update package.json to the current ref run: | cd datadog-lambda-js - yarn add https://github.com/DataDog/dd-trace-js#refs/heads/$GITHUB_HEAD_REF --save-dev + yarn add --dev https://github.com/DataDog/dd-trace-js#refs/heads/${GITHUB_HEAD_REF} - name: Build the layer env: NODE_VERSION: ${{ matrix.node-version }} From 35605f071c4eb518a0237968cd8454f2fac87b69 Mon Sep 17 00:00:00 2001 From: Ugaitz Urien Date: Tue, 3 Oct 2023 15:01:27 +0200 Subject: [PATCH 009/147] Prevent sending query address twice to the waf (#3671) * Prevent sending query address twice to the waf * Small changes + test * Fix invalid query type test and behaviour * Comment in the PR --- packages/dd-trace/src/appsec/index.js | 3 +-- .../src/appsec/waf/waf_context_wrapper.js | 15 ++++++++++- packages/dd-trace/test/appsec/index.spec.js | 5 ++-- .../appsec/waf/waf_context_wrapper.spec.js | 26 +++++++++++++++++++ 4 files changed, 43 insertions(+), 6 deletions(-) create mode 100644 packages/dd-trace/test/appsec/waf/waf_context_wrapper.spec.js diff --git a/packages/dd-trace/src/appsec/index.js b/packages/dd-trace/src/appsec/index.js index e68bed40127..41eba3bdd13 100644 --- a/packages/dd-trace/src/appsec/index.js +++ b/packages/dd-trace/src/appsec/index.js @@ -121,8 +121,7 @@ function incomingHttpEndTranslator ({ req, res }) { payload[addresses.HTTP_INCOMING_COOKIES] = req.cookies } - // TODO: no need to analyze it if it was already done by the body-parser hook - if (req.query !== undefined && req.query !== null) { + if (req.query && typeof req.query === 'object') { payload[addresses.HTTP_INCOMING_QUERY] = req.query } diff --git a/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js b/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js index d9b176ff531..801949b8563 100644 --- a/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js +++ b/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js @@ -2,6 +2,12 @@ const log = require('../../log') const Reporter = require('../reporter') +const addresses = require('../addresses') + +// TODO: remove once ephemeral addresses are implemented +const preventDuplicateAddresses = new Set([ + addresses.HTTP_INCOMING_QUERY +]) class WAFContextWrapper { constructor (ddwafContext, requiredAddresses, wafTimeout, wafVersion, rulesVersion) { @@ -10,16 +16,21 @@ class WAFContextWrapper { this.wafTimeout = wafTimeout this.wafVersion = 
wafVersion this.rulesVersion = rulesVersion + this.addressesToSkip = new Set() } run (params) { const inputs = {} let someInputAdded = false + const newAddressesToSkip = new Set(this.addressesToSkip) // TODO: possible optimizaion: only send params that haven't already been sent with same value to this wafContext for (const key of Object.keys(params)) { - if (this.requiredAddresses.has(key)) { + if (this.requiredAddresses.has(key) && !this.addressesToSkip.has(key)) { inputs[key] = params[key] + if (preventDuplicateAddresses.has(key)) { + newAddressesToSkip.add(key) + } someInputAdded = true } } @@ -33,6 +44,8 @@ class WAFContextWrapper { const end = process.hrtime.bigint() + this.addressesToSkip = newAddressesToSkip + const ruleTriggered = !!result.events?.length const blockTriggered = result.actions?.includes('block') diff --git a/packages/dd-trace/test/appsec/index.spec.js b/packages/dd-trace/test/appsec/index.spec.js index 643ad0c3c6d..0a5cc6c4add 100644 --- a/packages/dd-trace/test/appsec/index.spec.js +++ b/packages/dd-trace/test/appsec/index.spec.js @@ -316,7 +316,7 @@ describe('AppSec Index', () => { remotePort: 8080 }, body: null, - query: { queryKey: 'queryValue' }, + query: 'string', route: {}, params: 'string', cookies: 'string' @@ -337,8 +337,7 @@ describe('AppSec Index', () => { expect(waf.run).to.have.been.calledOnceWithExactly({ 'server.response.status': 201, - 'server.response.headers.no_cookies': { 'content-type': 'application/json', 'content-lenght': 42 }, - 'server.request.query': { queryKey: 'queryValue' } + 'server.response.headers.no_cookies': { 'content-type': 'application/json', 'content-lenght': 42 } }, req) expect(Reporter.finishRequest).to.have.been.calledOnceWithExactly(req, res) diff --git a/packages/dd-trace/test/appsec/waf/waf_context_wrapper.spec.js b/packages/dd-trace/test/appsec/waf/waf_context_wrapper.spec.js new file mode 100644 index 00000000000..d99300515a7 --- /dev/null +++ b/packages/dd-trace/test/appsec/waf/waf_context_wrapper.spec.js @@ -0,0 +1,26 @@ +'use strict' + +const WAFContextWrapper = require('../../../src/appsec/waf/waf_context_wrapper') +const addresses = require('../../../src/appsec/addresses') + +describe('WAFContextWrapper', () => { + it('Should send HTTP_INCOMING_QUERY only once', () => { + const requiredAddresses = new Set([ + addresses.HTTP_INCOMING_QUERY + ]) + const ddwafContext = { + run: sinon.stub() + } + const wafContextWrapper = new WAFContextWrapper(ddwafContext, requiredAddresses, + 1000, '1.14.0', '1.8.0') + + const payload = { + [addresses.HTTP_INCOMING_QUERY]: { key: 'value' } + } + + wafContextWrapper.run(payload) + wafContextWrapper.run(payload) + + expect(ddwafContext.run).to.have.been.calledOnceWithExactly(payload, 1000) + }) +}) From 00583f5a3db0eef6789124e3b8cfb9ddd47fc929 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Tue, 3 Oct 2023 15:30:02 +0200 Subject: [PATCH 010/147] fix integration tests (#3675) --- integration-tests/helpers.js | 4 ++-- integration-tests/startup.spec.js | 4 +++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/integration-tests/helpers.js b/integration-tests/helpers.js index f75d50b67f8..4be08654fb3 100644 --- a/integration-tests/helpers.js +++ b/integration-tests/helpers.js @@ -208,10 +208,10 @@ async function createSandbox (dependencies = [], isGitRepo = false, await exec(`yarn pack --filename ${out}`) // TODO: cache this await exec(`yarn add ${allDependencies.join(' ')}`, { cwd: folder, env: restOfEnv }) - 
integrationTestsPaths.forEach(async (path) => { + for (const path of integrationTestsPaths) { await exec(`cp -R ${path} ${folder}`) await exec(`sync ${folder}`) - }) + } if (followUpCommand) { await exec(followUpCommand, { cwd: folder, env: restOfEnv }) diff --git a/integration-tests/startup.spec.js b/integration-tests/startup.spec.js index 4e694f905d8..4033bc65ee9 100644 --- a/integration-tests/startup.spec.js +++ b/integration-tests/startup.spec.js @@ -129,7 +129,9 @@ describe('startup', () => { }) it('works for hostname and port', async () => { - proc = await spawnProc(startupTestFile) + proc = await spawnProc(startupTestFile, { + cwd + }) return curlAndAssertMessage(agent, proc, ({ headers, payload }) => { assert.propertyVal(headers, 'host', '127.0.0.1:8126') assert.isArray(payload) From 0ec4580a52e2a708206ada619d3a0c920aa5ef10 Mon Sep 17 00:00:00 2001 From: Jordi Bertran de Balanda Date: Fri, 29 Sep 2023 13:12:19 +0200 Subject: [PATCH 011/147] add _service to fake tracer --- benchmark/sirun/exporting-pipeline/index.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/benchmark/sirun/exporting-pipeline/index.js b/benchmark/sirun/exporting-pipeline/index.js index 8537c3b0a9e..ce0852b877e 100644 --- a/benchmark/sirun/exporting-pipeline/index.js +++ b/benchmark/sirun/exporting-pipeline/index.js @@ -45,7 +45,8 @@ function createSpan (parent) { } const span = { context: () => context, - tracer: () => ({}), + tracer: () => { return { _service: 'exporting-pipeline-sirun' } }, + setTag: () => {}, _startTime: 1415926, _duration: 100 } From a9aa1766891b587b4ba75f6265f1609d8d77ebbc Mon Sep 17 00:00:00 2001 From: Ugaitz Urien Date: Wed, 4 Oct 2023 09:54:23 +0200 Subject: [PATCH 012/147] Mongodb nosqli detection (#3483) --- .github/workflows/appsec.yml | 40 ++ .github/workflows/plugins.yml | 2 +- package.json | 2 +- .../src/express-mongo-sanitize.js | 45 ++ .../src/helpers/hooks.js | 3 +- .../datadog-instrumentations/src/mongodb.js | 63 +++ .../datadog-instrumentations/src/mongoose.js | 141 ++++- .../test/express-mongo-sanitize.spec.js | 210 +++++++ .../test/mongoose.spec.js | 513 ++++++++++++++++++ .../src/appsec/iast/analyzers/analyzers.js | 1 + .../nosql-injection-mongodb-analyzer.js | 166 ++++++ .../iast/analyzers/vulnerability-analyzer.js | 3 +- .../dd-trace/src/appsec/iast/iast-plugin.js | 4 + .../appsec/iast/taint-tracking/operations.js | 37 +- .../taint-tracking/secure-marks-generator.js | 13 + .../json-sensitive-analyzer.js | 16 + .../evidence-redaction/sensitive-handler.js | 7 +- .../evidence-redaction/sensitive-regex.js | 9 + .../iast/vulnerabilities-formatter/index.js | 14 +- .../iast/vulnerabilities-formatter/utils.js | 169 ++++++ .../src/appsec/iast/vulnerabilities.js | 1 + ...yzer.express-mongo-sanitize.plugin.spec.js | 151 ++++++ ...n-mongodb-analyzer.mongoose.plugin.spec.js | 139 +++++ .../nosql-injection-mongodb-analyzer.spec.js | 127 +++++ .../resources/mongodb-vulnerable-method.js | 6 + .../resources/mongoose-vulnerable-method.js | 5 + .../secure-marks-generator.spec.js | 18 + packages/dd-trace/test/appsec/iast/utils.js | 46 +- .../resources/evidence-redaction-suite.json | 362 ++++++++++++ .../vulnerability-formatter/utils.spec.js | 342 ++++++++++++ packages/dd-trace/test/plugins/externals.json | 35 +- yarn.lock | 8 +- 32 files changed, 2658 insertions(+), 40 deletions(-) create mode 100644 packages/datadog-instrumentations/src/express-mongo-sanitize.js create mode 100644 packages/datadog-instrumentations/src/mongodb.js create mode 100644 
packages/datadog-instrumentations/test/express-mongo-sanitize.spec.js create mode 100644 packages/datadog-instrumentations/test/mongoose.spec.js create mode 100644 packages/dd-trace/src/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.js create mode 100644 packages/dd-trace/src/appsec/iast/taint-tracking/secure-marks-generator.js create mode 100644 packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/json-sensitive-analyzer.js create mode 100644 packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-regex.js create mode 100644 packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/utils.js create mode 100644 packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.express-mongo-sanitize.plugin.spec.js create mode 100644 packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.mongoose.plugin.spec.js create mode 100644 packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.spec.js create mode 100644 packages/dd-trace/test/appsec/iast/analyzers/resources/mongodb-vulnerable-method.js create mode 100644 packages/dd-trace/test/appsec/iast/analyzers/resources/mongoose-vulnerable-method.js create mode 100644 packages/dd-trace/test/appsec/iast/taint-tracking/secure-marks-generator.spec.js create mode 100644 packages/dd-trace/test/appsec/iast/vulnerability-formatter/utils.spec.js diff --git a/.github/workflows/appsec.yml b/.github/workflows/appsec.yml index 05bfdb8f257..cc46e3ff25c 100644 --- a/.github/workflows/appsec.yml +++ b/.github/workflows/appsec.yml @@ -131,6 +131,46 @@ jobs: - run: yarn test:appsec:plugins:ci - uses: codecov/codecov-action@v2 + mongodb-core: + runs-on: ubuntu-latest + services: + mongodb: + image: circleci/mongo + ports: + - 27017:27017 + env: + PLUGINS: express-mongo-sanitize + SERVICES: mongo + steps: + - uses: actions/checkout@v2 + - uses: ./.github/actions/node/setup + - run: yarn install + - uses: ./.github/actions/node/oldest + - run: yarn test:appsec:plugins:ci + - uses: ./.github/actions/node/latest + - run: yarn test:appsec:plugins:ci + - uses: codecov/codecov-action@v2 + + mongoose: + runs-on: ubuntu-latest + services: + mongodb: + image: circleci/mongo + ports: + - 27017:27017 + env: + PLUGINS: mongoose + SERVICES: mongo + steps: + - uses: actions/checkout@v2 + - uses: ./.github/actions/node/setup + - run: yarn install + - uses: ./.github/actions/node/oldest + - run: yarn test:appsec:plugins:ci + - uses: ./.github/actions/node/latest + - run: yarn test:appsec:plugins:ci + - uses: codecov/codecov-action@v2 + sourcing: runs-on: ubuntu-latest env: diff --git a/.github/workflows/plugins.yml b/.github/workflows/plugins.yml index 77d3bc43c6f..4f37decd53c 100644 --- a/.github/workflows/plugins.yml +++ b/.github/workflows/plugins.yml @@ -670,7 +670,7 @@ jobs: ports: - 27017:27017 env: - PLUGINS: mongodb-core + PLUGINS: mongodb-core|express-mongo-sanitize SERVICES: mongo steps: - uses: actions/checkout@v2 diff --git a/package.json b/package.json index da845b42911..8612f4eead3 100644 --- a/package.json +++ b/package.json @@ -70,7 +70,7 @@ "dependencies": { "@datadog/native-appsec": "^4.0.0", "@datadog/native-iast-rewriter": "2.1.3", - "@datadog/native-iast-taint-tracking": "1.5.0", + "@datadog/native-iast-taint-tracking": "1.6.1", "@datadog/native-metrics": "^2.0.0", "@datadog/pprof": "4.0.0", "@datadog/sketches-js": "^2.1.0", diff --git a/packages/datadog-instrumentations/src/express-mongo-sanitize.js 
b/packages/datadog-instrumentations/src/express-mongo-sanitize.js new file mode 100644 index 00000000000..897ab4e32c1 --- /dev/null +++ b/packages/datadog-instrumentations/src/express-mongo-sanitize.js @@ -0,0 +1,45 @@ +'use strict' + +const { + channel, + addHook +} = require('./helpers/instrument') +const shimmer = require('../../datadog-shimmer') + +const sanitizeMethodFinished = channel('datadog:express-mongo-sanitize:sanitize:finish') +const sanitizeMiddlewareFinished = channel('datadog:express-mongo-sanitize:filter:finish') + +const propertiesToSanitize = ['body', 'params', 'headers', 'query'] + +addHook({ name: 'express-mongo-sanitize', versions: ['>=1.0.0'] }, expressMongoSanitize => { + shimmer.wrap(expressMongoSanitize, 'sanitize', sanitize => function () { + const sanitizedObject = sanitize.apply(this, arguments) + + if (sanitizeMethodFinished.hasSubscribers) { + sanitizeMethodFinished.publish({ sanitizedObject }) + } + + return sanitizedObject + }) + + return shimmer.wrap(expressMongoSanitize, function () { + const middleware = expressMongoSanitize.apply(this, arguments) + + return shimmer.wrap(middleware, function (req, res, next) { + if (!sanitizeMiddlewareFinished.hasSubscribers) { + return middleware.apply(this, arguments) + } + + const wrappedNext = shimmer.wrap(next, function () { + sanitizeMiddlewareFinished.publish({ + sanitizedProperties: propertiesToSanitize, + req + }) + + return next.apply(this, arguments) + }) + + return middleware.call(this, req, res, wrappedNext) + }) + }) +}) diff --git a/packages/datadog-instrumentations/src/helpers/hooks.js b/packages/datadog-instrumentations/src/helpers/hooks.js index 442b148f953..de136bce5e7 100644 --- a/packages/datadog-instrumentations/src/helpers/hooks.js +++ b/packages/datadog-instrumentations/src/helpers/hooks.js @@ -38,6 +38,7 @@ module.exports = { 'dns': () => require('../dns'), 'elasticsearch': () => require('../elasticsearch'), 'express': () => require('../express'), + 'express-mongo-sanitize': () => require('../express-mongo-sanitize'), 'fastify': () => require('../fastify'), 'find-my-way': () => require('../find-my-way'), 'fs': () => require('../fs'), @@ -68,7 +69,7 @@ module.exports = { 'mocha': () => require('../mocha'), 'mocha-each': () => require('../mocha'), 'moleculer': () => require('../moleculer'), - 'mongodb': () => require('../mongodb-core'), + 'mongodb': () => require('../mongodb'), 'mongodb-core': () => require('../mongodb-core'), 'mongoose': () => require('../mongoose'), 'mysql': () => require('../mysql'), diff --git a/packages/datadog-instrumentations/src/mongodb.js b/packages/datadog-instrumentations/src/mongodb.js new file mode 100644 index 00000000000..6dc20b5a8c7 --- /dev/null +++ b/packages/datadog-instrumentations/src/mongodb.js @@ -0,0 +1,63 @@ +'use strict' + +require('./mongodb-core') + +const { + channel, + addHook, + AsyncResource +} = require('./helpers/instrument') +const shimmer = require('../../datadog-shimmer') + +// collection methods with filter +const collectionMethodsWithFilter = [ + 'count', + 'countDocuments', + 'deleteMany', + 'deleteOne', + 'find', + 'findOneAndDelete', + 'findOneAndReplace', + 'replaceOne' +] // findOne is ignored because it calls to find + +const collectionMethodsWithTwoFilters = [ + 'findOneAndUpdate', + 'updateMany', + 'updateOne' +] + +const startCh = channel('datadog:mongodb:collection:filter:start') + +addHook({ name: 'mongodb', versions: ['>=3.3'] }, mongodb => { + [...collectionMethodsWithFilter, ...collectionMethodsWithTwoFilters].forEach(methodName 
=> { + if (!(methodName in mongodb.Collection.prototype)) return + + const useTwoArguments = collectionMethodsWithTwoFilters.includes(methodName) + + shimmer.wrap(mongodb.Collection.prototype, methodName, method => { + return function () { + if (!startCh.hasSubscribers) { + return method.apply(this, arguments) + } + + const asyncResource = new AsyncResource('bound-anonymous-fn') + + return asyncResource.runInAsyncScope(() => { + const filters = [arguments[0]] + if (useTwoArguments) { + filters.push(arguments[1]) + } + + startCh.publish({ + filters, + methodName + }) + + return method.apply(this, arguments) + }) + } + }) + }) + return mongodb +}) diff --git a/packages/datadog-instrumentations/src/mongoose.js b/packages/datadog-instrumentations/src/mongoose.js index 9de999b768c..4b13eaccdb3 100644 --- a/packages/datadog-instrumentations/src/mongoose.js +++ b/packages/datadog-instrumentations/src/mongoose.js @@ -1,6 +1,6 @@ 'use strict' -const { addHook } = require('./helpers/instrument') +const { addHook, channel } = require('./helpers/instrument') const { wrapThen } = require('./helpers/promise') const { AsyncResource } = require('./helpers/instrument') const shimmer = require('../../datadog-shimmer') @@ -26,5 +26,144 @@ addHook({ } shimmer.wrap(mongoose.Collection.prototype, 'addQueue', wrapAddQueue) + return mongoose }) + +const startCh = channel('datadog:mongoose:model:filter:start') +const finishCh = channel('datadog:mongoose:model:filter:finish') + +const collectionMethodsWithFilter = [ + 'count', + 'countDocuments', + 'deleteMany', + 'deleteOne', + 'find', + 'findOne', + 'findOneAndDelete', + 'findOneAndReplace', + 'replaceOne', + 'remove' +] + +const collectionMethodsWithTwoFilters = [ + 'findOneAndUpdate', + 'updateMany', + 'updateOne' +] + +addHook({ + name: 'mongoose', + versions: ['>=4.6.4 <5', '5', '6', '>=7'], + file: 'lib/model.js' +}, Model => { + [...collectionMethodsWithFilter, ...collectionMethodsWithTwoFilters].forEach(methodName => { + const useTwoArguments = collectionMethodsWithTwoFilters.includes(methodName) + if (!(methodName in Model)) return + + shimmer.wrap(Model, methodName, method => { + return function wrappedModelMethod () { + if (!startCh.hasSubscribers) { + return method.apply(this, arguments) + } + + const asyncResource = new AsyncResource('bound-anonymous-fn') + + const filters = [arguments[0]] + if (useTwoArguments) { + filters.push(arguments[1]) + } + + const finish = asyncResource.bind(function () { + finishCh.publish() + }) + + let callbackWrapped = false + const lastArgumentIndex = arguments.length - 1 + + if (typeof arguments[lastArgumentIndex] === 'function') { + // is a callback, wrap it to execute finish() + shimmer.wrap(arguments, lastArgumentIndex, originalCb => { + return function () { + finish() + + return originalCb.apply(this, arguments) + } + }) + + callbackWrapped = true + } + + return asyncResource.runInAsyncScope(() => { + startCh.publish({ + filters, + methodName + }) + + const res = method.apply(this, arguments) + + // if it is not callback, wrap exec method and its then + if (!callbackWrapped) { + shimmer.wrap(res, 'exec', originalExec => { + return function wrappedExec () { + const execResult = originalExec.apply(this, arguments) + + // wrap them method, wrap resolve and reject methods + shimmer.wrap(execResult, 'then', originalThen => { + return function wrappedThen () { + const resolve = arguments[0] + const reject = arguments[1] + + // not using shimmer here because resolve/reject could be empty + arguments[0] = function 
wrappedResolve () { + finish() + + if (resolve) { + return resolve.apply(this, arguments) + } + } + + arguments[1] = function wrappedReject () { + finish() + + if (reject) { + return reject.apply(this, arguments) + } + } + + return originalThen.apply(this, arguments) + } + }) + + return execResult + } + }) + } + return res + }) + } + }) + }) + + return Model +}) + +const sanitizeFilterFinishCh = channel('datadog:mongoose:sanitize-filter:finish') + +addHook({ + name: 'mongoose', + versions: ['6', '>=7'], + file: 'lib/helpers/query/sanitizeFilter.js' +}, sanitizeFilter => { + return shimmer.wrap(sanitizeFilter, function wrappedSanitizeFilter () { + const sanitizedObject = sanitizeFilter.apply(this, arguments) + + if (sanitizeFilterFinishCh.hasSubscribers) { + sanitizeFilterFinishCh.publish({ + sanitizedObject + }) + } + + return sanitizedObject + }) +}) diff --git a/packages/datadog-instrumentations/test/express-mongo-sanitize.spec.js b/packages/datadog-instrumentations/test/express-mongo-sanitize.spec.js new file mode 100644 index 00000000000..672ea492747 --- /dev/null +++ b/packages/datadog-instrumentations/test/express-mongo-sanitize.spec.js @@ -0,0 +1,210 @@ +'use strict' + +const agent = require('../../dd-trace/test/plugins/agent') +const getPort = require('get-port') +const { channel } = require('../../diagnostics_channel') +const axios = require('axios') +describe('express-mongo-sanitize', () => { + withVersions('express-mongo-sanitize', 'express-mongo-sanitize', version => { + describe('middleware', () => { + const sanitizeMiddlewareFinished = channel('datadog:express-mongo-sanitize:filter:finish') + let port, server, requestBody + + before(() => { + return agent.load(['express', 'express-mongo-sanitize'], { client: false }) + }) + + before((done) => { + const express = require('../../../versions/express').get() + const expressMongoSanitize = require(`../../../versions/express-mongo-sanitize@${version}`).get() + const app = express() + + app.use(expressMongoSanitize()) + app.all('/', (req, res) => { + requestBody(req, res) + res.end() + }) + + getPort().then(newPort => { + port = newPort + server = app.listen(port, () => { + done() + }) + }) + }) + + beforeEach(() => { + requestBody = sinon.stub() + }) + + after(() => { + server.close() + return agent.close({ ritmReset: false }) + }) + + describe('without subscriptions', () => { + it('it continues working without sanitization request', async () => { + expect(sanitizeMiddlewareFinished.hasSubscribers).to.be.false + + await axios.get(`http://localhost:${port}/?param=paramvalue`) + + expect(requestBody).to.be.calledOnce + expect(requestBody.firstCall.args[0].query.param).to.be.equal('paramvalue') + }) + + it('it continues working with sanitization request', async () => { + expect(sanitizeMiddlewareFinished.hasSubscribers).to.be.false + + await axios.get(`http://localhost:${port}/?param[$eq]=paramvalue`) + + expect(requestBody).to.be.calledOnce + expect(requestBody.firstCall.args[0].query.param['$eq']).to.be.undefined + }) + }) + + describe('with subscriptions', () => { + let subscription + + beforeEach(() => { + subscription = sinon.stub() + sanitizeMiddlewareFinished.subscribe(subscription) + }) + + afterEach(() => { + sanitizeMiddlewareFinished.unsubscribe(subscription) + }) + + it('it continues working without sanitization request', async () => { + expect(sanitizeMiddlewareFinished.hasSubscribers).to.be.true + + await axios.get(`http://localhost:${port}/?param=paramvalue`) + + expect(requestBody).to.be.calledOnce + 
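// a request without Mongo operators must reach the handler with its query untouched + 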
expect(requestBody.firstCall.args[0].query.param).to.be.equal('paramvalue') + }) + + it('it continues working with sanitization request', async () => { + expect(sanitizeMiddlewareFinished.hasSubscribers).to.be.true + + await axios.get(`http://localhost:${port}/?param[$eq]=paramvalue`) + + expect(requestBody).to.be.calledOnce + expect(requestBody.firstCall.args[0].query.param['$eq']).to.be.undefined + }) + + it('subscription is called with expected parameters without sanitization request', async () => { + expect(sanitizeMiddlewareFinished.hasSubscribers).to.be.true + + await axios.get(`http://localhost:${port}/?param=paramvalue`) + + expect(subscription).to.be.calledOnce + expect(subscription.firstCall.args[0].sanitizedProperties) + .to.be.deep.equal(['body', 'params', 'headers', 'query']) + expect(subscription.firstCall.args[0].req.query.param).to.be.equal('paramvalue') + }) + + it('subscription is called with expected parameters with sanitization request', async () => { + expect(sanitizeMiddlewareFinished.hasSubscribers).to.be.true + + await axios.get(`http://localhost:${port}/?param[$eq]=paramvalue`) + + expect(subscription).to.be.calledOnce + expect(subscription.firstCall.args[0].sanitizedProperties) + .to.be.deep.equal(['body', 'params', 'headers', 'query']) + expect(subscription.firstCall.args[0].req.query.param['$eq']).to.be.undefined + }) + }) + }) + + describe('sanitize method', () => { + const sanitizeFinished = channel('datadog:express-mongo-sanitize:sanitize:finish') + let expressMongoSanitize + + before(() => { + return agent.load(['express-mongo-sanitize'], { client: false }) + }) + + before(() => { + expressMongoSanitize = require(`../../../versions/express-mongo-sanitize@${version}`).get() + }) + + after(() => { + return agent.close({ ritmReset: false }) + }) + + describe('without subscriptions', () => { + it('it works as expected without modifications', () => { + expect(sanitizeFinished.hasSubscribers).to.be.false + + const objectToSanitize = { + safeKey: 'safeValue' + } + + const sanitizedObject = expressMongoSanitize.sanitize(objectToSanitize) + + expect(sanitizedObject.safeKey).to.be.equal(objectToSanitize.safeKey) + }) + + it('it works as expected with modifications', () => { + expect(sanitizeFinished.hasSubscribers).to.be.false + + const objectToSanitize = { + unsafeKey: { + '$ne': 'test' + }, + safeKey: 'safeValue' + } + + const sanitizedObject = expressMongoSanitize.sanitize(objectToSanitize) + + expect(sanitizedObject.safeKey).to.be.equal(objectToSanitize.safeKey) + expect(sanitizedObject.unsafeKey['$ne']).to.be.undefined + }) + }) + + describe('with subscriptions', () => { + let subscription + + beforeEach(() => { + subscription = sinon.stub() + sanitizeFinished.subscribe(subscription) + }) + + afterEach(() => { + sanitizeFinished.unsubscribe(subscription) + subscription = undefined + }) + + it('it works as expected without modifications', () => { + expect(sanitizeFinished.hasSubscribers).to.be.true + + const objectToSanitize = { + safeKey: 'safeValue' + } + + const sanitizedObject = expressMongoSanitize.sanitize(objectToSanitize) + + expect(sanitizedObject.safeKey).to.be.equal(objectToSanitize.safeKey) + expect(subscription).to.be.calledOnceWith({ sanitizedObject }) + }) + + it('it works as expected with modifications', () => { + expect(sanitizeFinished.hasSubscribers).to.be.true + + const objectToSanitize = { + unsafeKey: { + '$ne': 'test' + }, + safeKey: 'safeValue' + } + + const sanitizedObject = expressMongoSanitize.sanitize(objectToSanitize) + + 
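// keys without Mongo operators are returned unchanged + 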
expect(sanitizedObject.safeKey).to.be.equal(objectToSanitize.safeKey) + expect(sanitizedObject.unsafeKey['$ne']).to.be.undefined + expect(subscription).to.be.calledOnceWith({ sanitizedObject }) + }) + }) + }) + }) +}) diff --git a/packages/datadog-instrumentations/test/mongoose.spec.js b/packages/datadog-instrumentations/test/mongoose.spec.js new file mode 100644 index 00000000000..28fdf4764f1 --- /dev/null +++ b/packages/datadog-instrumentations/test/mongoose.spec.js @@ -0,0 +1,513 @@ +'use strict' + +const agent = require('../../dd-trace/test/plugins/agent') +const { channel } = require('../src/helpers/instrument') +const semver = require('semver') + +const startCh = channel('datadog:mongoose:model:filter:start') +const finishCh = channel('datadog:mongoose:model:filter:finish') + +const sanitizeFilterFinishCh = channel('datadog:mongoose:sanitize-filter:finish') +describe('mongoose instrumentations', () => { + // hack to be able to exclude cb test executions in >=7 + const iterationRanges = ['>4.0.0 <=6', '>=7'] + iterationRanges.forEach(range => { + describe(range, () => { + withVersions('mongoose', ['mongoose'], range, (version) => { + let Test, dbName, id, mongoose + + function connect () { + mongoose.connect(`mongodb://localhost:27017/${dbName}`, { + useNewUrlParser: true, + useUnifiedTopology: true + }) + } + + before(() => { + return agent.load(['mongoose']) + }) + + before(() => { + id = require('../../dd-trace/src/id') + dbName = id().toString() + + mongoose = require(`../../../versions/mongoose@${version}`).get() + + connect() + + Test = mongoose.model('Test', { name: String, type: String, other: String }) + }) + + beforeEach((done) => { + Test.insertMany([ + { + name: 'test1', + other: 'other1', + type: 'test' + }, + { + name: 'test2', + other: 'other2', + type: 'test' + }, + { + name: 'test3', + other: 'other3', + type: 'test' + }]).then(() => done()) + }) + + afterEach((done) => { + const deleteFilter = { + type: 'test' + } + + // some versions have deleteMany methods and others just delete + if (typeof Test.deleteMany === 'function') { + Test.deleteMany(deleteFilter).then(() => done()) + } else { + Test.remove(deleteFilter).then(() => done()) + } + }) + + after(() => { + return mongoose.disconnect() + }) + + after(() => { + return agent.close({ ritmReset: false }) + }) + + function testCallbacksCalled (methodName, filters, ...args) { + if (range !== '>=7') { + it('channel events published with cb', (done) => { + const start = sinon.stub() + const finish = sinon.stub() + startCh.subscribe(start) + finishCh.subscribe(finish) + + Test[methodName](...filters, ...args, () => { + startCh.unsubscribe(start) + finishCh.unsubscribe(finish) + + expect(start).to.have.been.calledOnceWith({ filters, methodName }) + expect(finish).to.have.been.calledOnce + + done() + }) + }) + } + + it('channel events published with then', (done) => { + const start = sinon.stub() + const finish = sinon.stub() + startCh.subscribe(start) + finishCh.subscribe(finish) + + Test[methodName](...filters, ...args).then(() => { + startCh.unsubscribe(start) + finishCh.unsubscribe(finish) + + expect(start).to.have.been.calledOnceWith({ filters, methodName }) + expect(finish).to.have.been.calledOnce + + done() + }) + }) + } + + describe('Model methods', () => { + describe('count', () => { + if (range !== '>=7') { + it('continue working as expected with cb', (done) => { + Test.count({ type: 'test' }, (err, res) => { + expect(err).to.be.null + expect(res).to.be.equal(3) + + done() + }) + }) + } + + it('continue working 
as expected with promise', (done) => { + Test.count({ type: 'test' }).then((res) => { + expect(res).to.be.equal(3) + + done() + }) + }) + + testCallbacksCalled('count', [{ type: 'test' }]) + }) + + if (semver.intersects(version, '>=6')) { + describe('countDocuments', () => { + if (range !== '>=7') { + it('continue working as expected with cb', (done) => { + Test.countDocuments({ type: 'test' }, (err, res) => { + expect(err).to.be.null + expect(res).to.be.equal(3) + + done() + }) + }) + } + + it('continue working as expected with then', (done) => { + Test.countDocuments({ type: 'test' }).then((res) => { + expect(res).to.be.equal(3) + + done() + }) + }) + + testCallbacksCalled('countDocuments', [{ type: 'test' }]) + }) + } + + if (semver.intersects(version, '>=5')) { + describe('deleteOne', () => { + if (range !== '>=7') { + it('continue working as expected with cb', (done) => { + Test.deleteOne({ type: 'test' }, (err) => { + expect(err).to.be.null + + Test.count({ type: 'test' }, (err, res) => { + expect(res).to.be.equal(2) // 3 -> delete 1 -> 2 + + done() + }) + }) + }) + } + + it('continue working as expected with then', (done) => { + Test.deleteOne({ type: 'test' }).then(() => { + Test.count({ type: 'test' }).then((res) => { + expect(res).to.be.equal(2) // 3 -> delete 1 -> 2 + + done() + }) + }) + }) + + testCallbacksCalled('deleteOne', [{ type: 'test' }]) + }) + } + + describe('find', () => { + if (range !== '>=7') { + it('continue working as expected with cb', (done) => { + Test.find({ type: 'test' }, (err, items) => { + expect(err).to.be.null + expect(items.length).to.be.equal(3) + + done() + }) + }) + } + + it('continue working as expected with then', (done) => { + Test.find({ type: 'test' }).then((items) => { + expect(items.length).to.be.equal(3) + + done() + }) + }) + + testCallbacksCalled('find', [{ type: 'test' }]) + }) + + describe('findOne', () => { + if (range !== '>=7') { + it('continue working as expected with cb', (done) => { + Test.findOne({ type: 'test' }, (err, item) => { + expect(err).to.be.null + expect(item).not.to.be.null + expect(item.name).to.be.equal('test1') + + done() + }) + }) + } + + it('continue working as expected with then', (done) => { + Test.findOne({ type: 'test' }).then((item) => { + expect(item).not.to.be.null + expect(item.name).to.be.equal('test1') + + done() + }) + }) + + testCallbacksCalled('findOne', [{ type: 'test' }]) + }) + + if (semver.intersects(version, '>=6')) { + describe('findOneAndDelete', () => { + if (range !== '>=7') { + it('continue working as expected with cb', (done) => { + Test.findOneAndDelete({ type: 'test' }, (err, item) => { + expect(err).to.be.null + expect(item).not.to.be.null + expect(item.name).to.be.equal('test1') + + Test.count({ type: 'test' }, (err, res) => { + expect(res).to.be.equal(2) // 3 -> delete 1 -> 2 + + done() + }) + }) + }) + } + + it('continue working as expected with then', (done) => { + Test.findOneAndDelete({ type: 'test' }).then((item) => { + expect(item).not.to.be.null + expect(item.name).to.be.equal('test1') + + Test.count({ type: 'test' }).then((res) => { + expect(res).to.be.equal(2) // 3 -> delete 1 -> 2 + + done() + }) + }) + }) + + testCallbacksCalled('findOneAndDelete', [{ type: 'test' }]) + }) + } + + if (semver.intersects(version, '>=6')) { + describe('findOneAndReplace', () => { + if (range !== '>=7') { + it('continue working as expected with cb', (done) => { + Test.findOneAndReplace({ name: 'test1' }, { + name: 'test1-modified', + type: 'test' + }, (err) => { + expect(err).to.be.null + + 
Test.find({ name: 'test1-modified' }, (err, item) => { + expect(err).to.be.null + expect(item).not.to.be.null + + done() + }) + }) + }) + } + + it('continue working as expected with then', (done) => { + Test.findOneAndReplace({ name: 'test1' }, { + name: 'test1-modified', + type: 'test' + }).then(() => { + Test.find({ name: 'test1-modified' }).then((item) => { + expect(item).not.to.be.null + + done() + }) + }) + }) + + testCallbacksCalled('findOneAndDelete', [{ type: 'test' }], { + name: 'test1-modified', + type: 'test' + }) + }) + } + + if (semver.intersects(version, '>=5')) { + describe('replaceOne', () => { + if (range !== '>=7') { + it('continue working as expected with cb', (done) => { + Test.replaceOne({ name: 'test1' }, { + name: 'test1-modified', + type: 'test' + }, (err) => { + expect(err).to.be.null + + Test.find({ name: 'test1-modified' }, (err, item) => { + expect(err).to.be.null + expect(item).not.to.be.null + + done() + }) + }) + }) + } + + it('continue working as expected with then', (done) => { + Test.replaceOne({ name: 'test1' }, { + name: 'test1-modified', + type: 'test' + }).then(() => { + Test.find({ name: 'test1-modified' }).then((item) => { + expect(item).not.to.be.null + + done() + }) + }) + }) + + testCallbacksCalled('replaceOne', [{ type: 'test' }], { + name: 'test1-modified', + type: 'test' + }) + }) + } + + describe('findOneAndUpdate', () => { + if (range !== '>=7') { + it('continue working as expected with cb', (done) => { + Test.findOneAndUpdate({ name: 'test1' }, { '$set': { name: 'test1-modified' } }, (err) => { + expect(err).to.be.null + + Test.findOne({ name: 'test1-modified' }, (err, item) => { + expect(err).to.be.null + expect(item).not.to.be.null + + done() + }) + }) + }) + } + + it('continue working as expected with then', (done) => { + Test.findOneAndUpdate({ name: 'test1' }, { '$set': { name: 'test1-modified' } }).then((res) => { + Test.findOne({ name: 'test1-modified' }).then((item) => { + expect(item).not.to.be.null + + done() + }) + }) + }) + + testCallbacksCalled('findOneAndUpdate', [{ type: 'test' }, { '$set': { name: 'test1-modified' } }]) + }) + + if (semver.intersects(version, '>=5')) { + describe('updateMany', () => { + if (range !== '>=7') { + it('continue working as expected with cb', (done) => { + Test.updateMany({ type: 'test' }, { + '$set': { + other: 'modified-other' + } + }, (err) => { + expect(err).to.be.null + + Test.find({ type: 'test' }, (err, items) => { + expect(err).to.be.null + expect(items.length).to.be.equal(3) + + items.forEach(item => { + expect(item.other).to.be.equal('modified-other') + }) + + done() + }) + }) + }) + } + + it('continue working as expected with then', (done) => { + Test.updateMany({ type: 'test' }, { + '$set': { + other: 'modified-other' + } + }).then((err) => { + Test.find({ type: 'test' }).then((items) => { + expect(items.length).to.be.equal(3) + + items.forEach(item => { + expect(item.other).to.be.equal('modified-other') + }) + + done() + }) + }) + }) + + testCallbacksCalled('updateMany', [{ type: 'test' }, { '$set': { other: 'modified-other' } }]) + }) + } + + if (semver.intersects(version, '>=5')) { + describe('updateOne', () => { + if (range !== '>=7') { + it('continue working as expected with cb', (done) => { + Test.updateOne({ name: 'test1' }, { + '$set': { + other: 'modified-other' + } + }, (err) => { + expect(err).to.be.null + + Test.findOne({ name: 'test1' }, (err, item) => { + expect(err).to.be.null + expect(item.other).to.be.equal('modified-other') + + done() + }) + }) + }) + } + + 
it('continue working as expected with then', (done) => { + Test.updateOne({ name: 'test1' }, { + '$set': { + other: 'modified-other' + } + }).then(() => { + Test.findOne({ name: 'test1' }).then((item) => { + expect(item.other).to.be.equal('modified-other') + + done() + }) + }) + }) + + testCallbacksCalled('updateOne', [{ name: 'test1' }, { '$set': { other: 'modified-other' } }]) + }) + } + }) + + if (semver.intersects(version, '>=6')) { + describe('sanitizeFilter', () => { + it('continues working as expected without sanitization', () => { + const source = { 'username': 'test' } + const expected = { 'username': 'test' } + + const sanitizedObject = mongoose.sanitizeFilter(source) + + expect(sanitizedObject).to.be.deep.equal(expected) + }) + + it('continues working as expected without sanitization', () => { + const source = { 'username': { '$ne': 'test' } } + const expected = { 'username': { '$eq': { '$ne': 'test' } } } + + const sanitizedObject = mongoose.sanitizeFilter(source) + + expect(sanitizedObject).to.be.deep.equal(expected) + }) + + it('channel is published with the result object', () => { + const source = { 'username': { '$ne': 'test' } } + + const listener = sinon.stub() + sanitizeFilterFinishCh.subscribe(listener) + const sanitizedObject = mongoose.sanitizeFilter(source) + + sanitizeFilterFinishCh.unsubscribe(listener) + + expect(listener).to.have.been.calledOnceWith({ sanitizedObject }) + }) + }) + } + }) + }) + }) +}) diff --git a/packages/dd-trace/src/appsec/iast/analyzers/analyzers.js b/packages/dd-trace/src/appsec/iast/analyzers/analyzers.js index baf32bdb2e4..d76b8bd1e7f 100644 --- a/packages/dd-trace/src/appsec/iast/analyzers/analyzers.js +++ b/packages/dd-trace/src/appsec/iast/analyzers/analyzers.js @@ -7,6 +7,7 @@ module.exports = { 'LDAP_ANALYZER': require('./ldap-injection-analyzer'), 'NO_HTTPONLY_COOKIE_ANALYZER': require('./no-httponly-cookie-analyzer'), 'NO_SAMESITE_COOKIE_ANALYZER': require('./no-samesite-cookie-analyzer'), + 'NOSQL_MONGODB_INJECTION': require('./nosql-injection-mongodb-analyzer'), 'PATH_TRAVERSAL_ANALYZER': require('./path-traversal-analyzer'), 'SQL_INJECTION_ANALYZER': require('./sql-injection-analyzer'), 'SSRF': require('./ssrf-analyzer'), diff --git a/packages/dd-trace/src/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.js b/packages/dd-trace/src/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.js new file mode 100644 index 00000000000..8855ed6ff9e --- /dev/null +++ b/packages/dd-trace/src/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.js @@ -0,0 +1,166 @@ +'use strict' + +const InjectionAnalyzer = require('./injection-analyzer') +const { NOSQL_MONGODB_INJECTION } = require('../vulnerabilities') +const { getRanges, addSecureMark } = require('../taint-tracking/operations') +const { getNodeModulesPaths } = require('../path-line') +const { getNextSecureMark } = require('../taint-tracking/secure-marks-generator') +const { storage } = require('../../../../../datadog-core') +const { getIastContext } = require('../iast-context') + +const EXCLUDED_PATHS_FROM_STACK = getNodeModulesPaths('mongodb', 'mongoose') +const MONGODB_NOSQL_SECURE_MARK = getNextSecureMark() + +function iterateObjectStrings (target, fn, levelKeys = [], depth = 50, visited = new Set()) { + if (target && typeof target === 'object') { + Object.keys(target).forEach((key) => { + const nextLevelKeys = [...levelKeys, key] + const val = target[key] + + if (typeof val === 'string') { + fn(val, nextLevelKeys, target, key) + } else if (depth > 0 && !visited.has(val)) { + 
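// recurse into nested values, decrementing the depth budget and tracking visited + // references so circular structures cannot cause unbounded recursion + 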
iterateObjectStrings(val, fn, nextLevelKeys, depth - 1, visited) + visited.add(val) + } + }) + } +} + +class NosqlInjectionMongodbAnalyzer extends InjectionAnalyzer { + constructor () { + super(NOSQL_MONGODB_INJECTION) + this.sanitizedObjects = new WeakSet() + } + + onConfigure () { + this.configureSanitizers() + + this.addSub('datadog:mongodb:collection:filter:start', ({ filters }) => { + const store = storage.getStore() + if (store && !store.nosqlAnalyzed && filters?.length) { + filters.forEach(filter => { + this.analyze({ filter }, store) + }) + } + }) + + this.addSub('datadog:mongoose:model:filter:start', ({ filters }) => { + const store = storage.getStore() + if (!store) return + + if (filters?.length) { + filters.forEach(filter => { + this.analyze({ filter }, store) + }) + } + + storage.enterWith({ ...store, nosqlAnalyzed: true, mongooseParentStore: store }) + }) + + this.addSub('datadog:mongoose:model:filter:finish', () => { + const store = storage.getStore() + if (store?.mongooseParentStore) { + storage.enterWith(store.mongooseParentStore) + } + }) + } + + configureSanitizers () { + this.addNotSinkSub('datadog:express-mongo-sanitize:filter:finish', ({ sanitizedProperties, req }) => { + const store = storage.getStore() + const iastContext = getIastContext(store) + + if (iastContext) { // do nothing if we are not in an iast request + sanitizedProperties.forEach(key => { + iterateObjectStrings(req[key], function (value, levelKeys) { + if (typeof value === 'string') { + let parentObj = req[key] + const levelsLength = levelKeys.length + + for (let i = 0; i < levelsLength; i++) { + const currentLevelKey = levelKeys[i] + + if (i === levelsLength - 1) { + parentObj[currentLevelKey] = addSecureMark(iastContext, value, MONGODB_NOSQL_SECURE_MARK) + } else { + parentObj = parentObj[currentLevelKey] + } + } + } + }) + }) + } + }) + + this.addNotSinkSub('datadog:express-mongo-sanitize:sanitize:finish', ({ sanitizedObject }) => { + const store = storage.getStore() + const iastContext = getIastContext(store) + + if (iastContext) { // do nothing if we are not in an iast request + iterateObjectStrings(sanitizedObject, function (value, levelKeys, parent, lastKey) { + try { + parent[lastKey] = addSecureMark(iastContext, value, MONGODB_NOSQL_SECURE_MARK) + } catch { + // if it is a readonly property, do nothing + } + }) + } + }) + + this.addNotSinkSub('datadog:mongoose:sanitize-filter:finish', ({ sanitizedObject }) => { + this.sanitizedObjects.add(sanitizedObject) + }) + } + + _isVulnerable (value, iastContext) { + if (value?.filter && iastContext) { + let isVulnerable = false + + if (this.sanitizedObjects.has(value.filter)) { + return false + } + + const rangesByKey = {} + const allRanges = [] + + iterateObjectStrings(value.filter, function (val, nextLevelKeys) { + const ranges = getRanges(iastContext, val) + if (ranges?.length) { + const filteredRanges = [] + + for (const range of ranges) { + if ((range.secureMarks & MONGODB_NOSQL_SECURE_MARK) !== MONGODB_NOSQL_SECURE_MARK) { + isVulnerable = true + filteredRanges.push(range) + } + } + + if (filteredRanges.length > 0) { + rangesByKey[nextLevelKeys.join('.')] = filteredRanges + allRanges.push(...filteredRanges) + } + } + }, [], 4) + + if (isVulnerable) { + value.rangesToApply = rangesByKey + value.ranges = allRanges + } + + return isVulnerable + } + return false + } + + _getEvidence (value, iastContext) { + return { value: value.filter, rangesToApply: value.rangesToApply, ranges: value.ranges } + } + + _getExcludedPaths () { + return 
EXCLUDED_PATHS_FROM_STACK + } +} + +module.exports = new NosqlInjectionMongodbAnalyzer() +module.exports.MONGODB_NOSQL_SECURE_MARK = MONGODB_NOSQL_SECURE_MARK diff --git a/packages/dd-trace/src/appsec/iast/analyzers/vulnerability-analyzer.js b/packages/dd-trace/src/appsec/iast/analyzers/vulnerability-analyzer.js index cad650c0ed0..1f52790300d 100644 --- a/packages/dd-trace/src/appsec/iast/analyzers/vulnerability-analyzer.js +++ b/packages/dd-trace/src/appsec/iast/analyzers/vulnerability-analyzer.js @@ -71,8 +71,7 @@ class Analyzer extends SinkIastPlugin { return store && !iastContext } - analyze (value) { - const store = storage.getStore() + analyze (value, store = storage.getStore()) { const iastContext = getIastContext(store) if (this._isInvalidContext(store, iastContext)) return diff --git a/packages/dd-trace/src/appsec/iast/iast-plugin.js b/packages/dd-trace/src/appsec/iast/iast-plugin.js index e70c0d82b5f..6c8d2c40535 100644 --- a/packages/dd-trace/src/appsec/iast/iast-plugin.js +++ b/packages/dd-trace/src/appsec/iast/iast-plugin.js @@ -196,6 +196,10 @@ class SinkIastPlugin extends IastPlugin { addSub (iastPluginSub, handler) { return super.addSub({ tagKey: TagKey.VULNERABILITY_TYPE, ...iastPluginSub }, handler) } + + addNotSinkSub (iastPluginSub, handler) { + return super.addSub(iastPluginSub, handler) + } } module.exports = { diff --git a/packages/dd-trace/src/appsec/iast/taint-tracking/operations.js b/packages/dd-trace/src/appsec/iast/taint-tracking/operations.js index bed321d009d..8240b358419 100644 --- a/packages/dd-trace/src/appsec/iast/taint-tracking/operations.js +++ b/packages/dd-trace/src/appsec/iast/taint-tracking/operations.js @@ -18,15 +18,14 @@ let onRemoveTransaction = (transactionId, iastContext) => {} function onRemoveTransactionInformationTelemetry (transactionId, iastContext) { const metrics = TaintedUtils.getMetrics(transactionId, iastTelemetry.verbosity) - if (metrics && metrics.requestCount) { + if (metrics?.requestCount) { REQUEST_TAINTED.add(metrics.requestCount, null, iastContext) } } function removeTransaction (iastContext) { - if (iastContext && iastContext[IAST_TRANSACTION_ID]) { - const transactionId = iastContext[IAST_TRANSACTION_ID] - + const transactionId = iastContext?.[IAST_TRANSACTION_ID] + if (transactionId) { onRemoveTransaction(transactionId, iastContext) TaintedUtils.removeTransaction(transactionId) @@ -36,8 +35,8 @@ function removeTransaction (iastContext) { function newTaintedString (iastContext, string, name, type) { let result = string - if (iastContext && iastContext[IAST_TRANSACTION_ID]) { - const transactionId = iastContext[IAST_TRANSACTION_ID] + const transactionId = iastContext?.[IAST_TRANSACTION_ID] + if (transactionId) { result = TaintedUtils.newTaintedString(transactionId, string, name, type) } else { result = string @@ -47,15 +46,17 @@ function newTaintedString (iastContext, string, name, type) { function taintObject (iastContext, object, type, keyTainting, keyType) { let result = object - if (iastContext && iastContext[IAST_TRANSACTION_ID]) { - const transactionId = iastContext[IAST_TRANSACTION_ID] + const transactionId = iastContext?.[IAST_TRANSACTION_ID] + if (transactionId) { const queue = [{ parent: null, property: null, value: object }] const visited = new WeakSet() + while (queue.length > 0) { const { parent, property, value, key } = queue.pop() if (value === null) { continue } + try { if (typeof value === 'string') { const tainted = TaintedUtils.newTaintedString(transactionId, value, property, type) @@ -71,11 +72,13 @@ 
function taintObject (iastContext, object, type, keyTainting, keyType) { } } else if (typeof value === 'object' && !visited.has(value)) { visited.add(value) + const keys = Object.keys(value) for (let i = 0; i < keys.length; i++) { const key = keys[i] queue.push({ parent: value, property: property ? `${property}.${key}` : key, value: value[key], key }) } + if (parent && keyTainting && key) { const taintedProperty = TaintedUtils.newTaintedString(transactionId, key, property, keyType) parent[taintedProperty] = value @@ -91,8 +94,8 @@ function taintObject (iastContext, object, type, keyTainting, keyType) { function isTainted (iastContext, string) { let result = false - if (iastContext && iastContext[IAST_TRANSACTION_ID]) { - const transactionId = iastContext[IAST_TRANSACTION_ID] + const transactionId = iastContext?.[IAST_TRANSACTION_ID] + if (transactionId) { result = TaintedUtils.isTainted(transactionId, string) } else { result = false @@ -102,8 +105,8 @@ function isTainted (iastContext, string) { function getRanges (iastContext, string) { let result = [] - if (iastContext && iastContext[IAST_TRANSACTION_ID]) { - const transactionId = iastContext[IAST_TRANSACTION_ID] + const transactionId = iastContext?.[IAST_TRANSACTION_ID] + if (transactionId) { result = TaintedUtils.getRanges(transactionId, string) } else { result = [] @@ -111,6 +114,15 @@ function getRanges (iastContext, string) { return result } +function addSecureMark (iastContext, string, mark) { + const transactionId = iastContext?.[IAST_TRANSACTION_ID] + if (transactionId) { + return TaintedUtils.addSecureMarksToTaintedString(transactionId, string, mark) + } + + return string +} + function enableTaintOperations (telemetryVerbosity) { if (isInfoAllowed(telemetryVerbosity)) { onRemoveTransaction = onRemoveTransactionInformationTelemetry @@ -132,6 +144,7 @@ function setMaxTransactions (transactions) { } module.exports = { + addSecureMark, createTransaction, removeTransaction, newTaintedString, diff --git a/packages/dd-trace/src/appsec/iast/taint-tracking/secure-marks-generator.js b/packages/dd-trace/src/appsec/iast/taint-tracking/secure-marks-generator.js new file mode 100644 index 00000000000..5298667811e --- /dev/null +++ b/packages/dd-trace/src/appsec/iast/taint-tracking/secure-marks-generator.js @@ -0,0 +1,13 @@ +'use strict' + +let next = 0 + +function getNextSecureMark () { + return 1 << next++ +} + +function reset () { + next = 0 +} + +module.exports = { getNextSecureMark, reset } diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/json-sensitive-analyzer.js b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/json-sensitive-analyzer.js new file mode 100644 index 00000000000..18efa8081b8 --- /dev/null +++ b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/json-sensitive-analyzer.js @@ -0,0 +1,16 @@ +'use strict' + +const { stringifyWithRanges } = require('../../utils') + +class JsonSensitiveAnalyzer { + extractSensitiveRanges (evidence) { + // expect object evidence + const { value, ranges, sensitiveRanges } = stringifyWithRanges(evidence.value, evidence.rangesToApply, true) + evidence.value = value + evidence.ranges = ranges + + return sensitiveRanges + } +} + +module.exports = JsonSensitiveAnalyzer diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler.js 
b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler.js index 86f9863aeb0..8dcb59f1b45 100644 --- a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler.js +++ b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler.js @@ -5,14 +5,12 @@ const vulnerabilities = require('../../vulnerabilities') const { contains, intersects, remove } = require('./range-utils') const CommandSensitiveAnalyzer = require('./sensitive-analyzers/command-sensitive-analyzer') +const JsonSensitiveAnalyzer = require('./sensitive-analyzers/json-sensitive-analyzer') const LdapSensitiveAnalyzer = require('./sensitive-analyzers/ldap-sensitive-analyzer') const SqlSensitiveAnalyzer = require('./sensitive-analyzers/sql-sensitive-analyzer') const UrlSensitiveAnalyzer = require('./sensitive-analyzers/url-sensitive-analyzer') -// eslint-disable-next-line max-len -const DEFAULT_IAST_REDACTION_NAME_PATTERN = '(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)key(?:_?id)?|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)' -// eslint-disable-next-line max-len -const DEFAULT_IAST_REDACTION_VALUE_PATTERN = '(?:bearer\\s+[a-z0-9\\._\\-]+|glpat-[\\w\\-]{20}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L][\\w=\\-]+\\.ey[I-L][\\w=\\-]+(?:\\.[\\w.+/=\\-]+)?|(?:[\\-]{5}BEGIN[a-z\\s]+PRIVATE\\sKEY[\\-]{5}[^\\-]+[\\-]{5}END[a-z\\s]+PRIVATE\\sKEY[\\-]{5}|ssh-rsa\\s*[a-z0-9/\\.+]{100,}))' +const { DEFAULT_IAST_REDACTION_NAME_PATTERN, DEFAULT_IAST_REDACTION_VALUE_PATTERN } = require('./sensitive-regex') const REDACTED_SOURCE_BUFFER = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789' @@ -23,6 +21,7 @@ class SensitiveHandler { this._sensitiveAnalyzers = new Map() this._sensitiveAnalyzers.set(vulnerabilities.COMMAND_INJECTION, new CommandSensitiveAnalyzer()) + this._sensitiveAnalyzers.set(vulnerabilities.NOSQL_MONGODB_INJECTION, new JsonSensitiveAnalyzer()) this._sensitiveAnalyzers.set(vulnerabilities.LDAP_INJECTION, new LdapSensitiveAnalyzer()) this._sensitiveAnalyzers.set(vulnerabilities.SQL_INJECTION, new SqlSensitiveAnalyzer()) const urlSensitiveAnalyzer = new UrlSensitiveAnalyzer() diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-regex.js b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-regex.js new file mode 100644 index 00000000000..f1ae7249a42 --- /dev/null +++ b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-regex.js @@ -0,0 +1,9 @@ +// eslint-disable-next-line max-len +const DEFAULT_IAST_REDACTION_NAME_PATTERN = '(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)key(?:_?id)?|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)' +// eslint-disable-next-line max-len +const DEFAULT_IAST_REDACTION_VALUE_PATTERN = '(?:bearer\\s+[a-z0-9\\._\\-]+|glpat-[\\w\\-]{20}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L][\\w=\\-]+\\.ey[I-L][\\w=\\-]+(?:\\.[\\w.+/=\\-]+)?|(?:[\\-]{5}BEGIN[a-z\\s]+PRIVATE\\sKEY[\\-]{5}[^\\-]+[\\-]{5}END[a-z\\s]+PRIVATE\\sKEY[\\-]{5}|ssh-rsa\\s*[a-z0-9/\\.+]{100,}))' + +module.exports = { + DEFAULT_IAST_REDACTION_NAME_PATTERN, + DEFAULT_IAST_REDACTION_VALUE_PATTERN +} diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/index.js b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/index.js index 
006ca78c89e..29611ccbc59 100644 --- a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/index.js +++ b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/index.js @@ -1,4 +1,7 @@ +'use strict' + const sensitiveHandler = require('./evidence-redaction/sensitive-handler') +const { stringifyWithRanges } = require('./utils') class VulnerabilityFormatter { constructor () { @@ -38,6 +41,13 @@ class VulnerabilityFormatter { getUnredactedValueParts (evidence, sourcesIndexes) { const valueParts = [] let fromIndex = 0 + + if (typeof evidence.value === 'object' && evidence.rangesToApply) { + const { value, ranges } = stringifyWithRanges(evidence.value, evidence.rangesToApply) + evidence.value = value + evidence.ranges = ranges + } + evidence.ranges.forEach((range, rangeIndex) => { if (fromIndex < range.start) { valueParts.push({ value: evidence.value.substring(fromIndex, range.start) }) @@ -45,14 +55,16 @@ class VulnerabilityFormatter { valueParts.push({ value: evidence.value.substring(range.start, range.end), source: sourcesIndexes[rangeIndex] }) fromIndex = range.end }) + if (fromIndex < evidence.value.length) { valueParts.push({ value: evidence.value.substring(fromIndex) }) } + return { valueParts } } formatEvidence (type, evidence, sourcesIndexes, sources) { - if (!evidence.ranges) { + if (!evidence.ranges && !evidence.rangesToApply) { if (typeof evidence.value === 'undefined') { return undefined } else { diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/utils.js b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/utils.js new file mode 100644 index 00000000000..bf065ad3d65 --- /dev/null +++ b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/utils.js @@ -0,0 +1,169 @@ +'use strict' + +const crypto = require('crypto') +const { DEFAULT_IAST_REDACTION_VALUE_PATTERN } = require('./evidence-redaction/sensitive-regex') + +const STRINGIFY_RANGE_KEY = 'DD_' + crypto.randomBytes(20).toString('hex') +const STRINGIFY_SENSITIVE_KEY = STRINGIFY_RANGE_KEY + 'SENSITIVE' +const STRINGIFY_SENSITIVE_NOT_STRING_KEY = STRINGIFY_SENSITIVE_KEY + 'NOTSTRING' + +// eslint-disable-next-line max-len +const KEYS_REGEX_WITH_SENSITIVE_RANGES = new RegExp(`(?:"(${STRINGIFY_RANGE_KEY}_\\d+_))|(?:"(${STRINGIFY_SENSITIVE_KEY}_\\d+_(\\d+)_))|("${STRINGIFY_SENSITIVE_NOT_STRING_KEY}_\\d+_([\\s0-9.a-zA-Z]*)")`, 'gm') +const KEYS_REGEX_WITHOUT_SENSITIVE_RANGES = new RegExp(`"(${STRINGIFY_RANGE_KEY}_\\d+_)`, 'gm') + +const sensitiveValueRegex = new RegExp(DEFAULT_IAST_REDACTION_VALUE_PATTERN, 'gmi') + +function iterateObject (target, fn, levelKeys = [], depth = 50) { + Object.keys(target).forEach((key) => { + const nextLevelKeys = [...levelKeys, key] + const val = target[key] + + fn(val, nextLevelKeys, target, key) + + if (val !== null && typeof val === 'object') { + iterateObject(val, fn, nextLevelKeys, depth - 1) + } + }) +} + +function stringifyWithRanges (obj, objRanges, loadSensitiveRanges = false) { + let value + const ranges = [] + const sensitiveRanges = [] + objRanges = objRanges || {} + + if (objRanges || loadSensitiveRanges) { + const cloneObj = Array.isArray(obj) ? 
[] : {} + let counter = 0 + const allRanges = {} + const sensitiveKeysMapping = {} + + iterateObject(obj, (val, levelKeys, parent, key) => { + let currentLevelClone = cloneObj + for (let i = 0; i < levelKeys.length - 1; i++) { + let levelKey = levelKeys[i] + + if (!currentLevelClone[levelKey]) { + const sensitiveKey = sensitiveKeysMapping[levelKey] + if (currentLevelClone[sensitiveKey]) { + levelKey = sensitiveKey + } + } + + currentLevelClone = currentLevelClone[levelKey] + } + + if (loadSensitiveRanges) { + const sensitiveKey = sensitiveKeysMapping[key] + if (sensitiveKey) { + key = sensitiveKey + } else { + sensitiveValueRegex.lastIndex = 0 + + if (sensitiveValueRegex.test(key)) { + const current = counter++ + const id = `${STRINGIFY_SENSITIVE_KEY}_${current}_${key.length}_` + key = `${id}${key}` + } + } + } + + if (typeof val === 'string') { + const ranges = objRanges[levelKeys.join('.')] + if (ranges) { + const current = counter++ + const id = `${STRINGIFY_RANGE_KEY}_${current}_` + + allRanges[id] = ranges + currentLevelClone[key] = `${id}${val}` + } else { + currentLevelClone[key] = val + } + if (loadSensitiveRanges) { + const current = counter++ + const id = `${STRINGIFY_SENSITIVE_KEY}_${current}_${val.length}_` + + currentLevelClone[key] = `${id}${currentLevelClone[key]}` + } + } else if (typeof val !== 'object' || val === null) { + if (loadSensitiveRanges) { + const current = counter++ + const id = `${STRINGIFY_SENSITIVE_NOT_STRING_KEY}_${current}_` + + // this is special, in the final string we should modify "key_value_[null|false|true]..." + // by null|false|..... ignoring the beginning and ending quotes + currentLevelClone[key] = id + val + } else { + currentLevelClone[key] = val + } + } else if (Array.isArray(val)) { + currentLevelClone[key] = [] + } else { + currentLevelClone[key] = {} + } + }) + + value = JSON.stringify(cloneObj, null, 2) + + if (counter > 0) { + let keysRegex + if (loadSensitiveRanges) { + keysRegex = KEYS_REGEX_WITH_SENSITIVE_RANGES + } else { + keysRegex = KEYS_REGEX_WITHOUT_SENSITIVE_RANGES + } + keysRegex.lastIndex = 0 + + let regexRes = keysRegex.exec(value) + while (regexRes) { + const offset = regexRes.index + 1 // +1 to increase the " char + + if (regexRes[1]) { + // is a range + const rangesId = regexRes[1] + value = value.replace(rangesId, '') + + const updatedRanges = allRanges[rangesId].map(range => { + return { + ...range, + start: range.start + offset, + end: range.end + offset + } + }) + + ranges.push(...updatedRanges) + } else if (regexRes[2]) { + // is a sensitive string literal + const sensitiveId = regexRes[2] + + sensitiveRanges.push({ + start: offset, + end: offset + parseInt(regexRes[3]) + }) + + value = value.replace(sensitiveId, '') + } else if (regexRes[4]) { + // is a sensitive value (number, null, false, ...) 
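            // regexRes[4] captured the whole quoted placeholder and regexRes[5] the original
            // literal; the literal's start/end offsets are recorded as a sensitive range and the
            // quoted placeholder is swapped back for the bare literal, so the temporary quotes
            // do not survive in the final stringified JSON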
+ const sensitiveId = regexRes[4] + const originalValue = regexRes[5] + + sensitiveRanges.push({ + start: regexRes.index, + end: regexRes.index + originalValue.length + }) + + value = value.replace(sensitiveId, originalValue) + } + + keysRegex.lastIndex = 0 + regexRes = keysRegex.exec(value) + } + } + } else { + value = JSON.stringify(obj, null, 2) + } + + return { value, ranges, sensitiveRanges } +} + +module.exports = { stringifyWithRanges } diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities.js b/packages/dd-trace/src/appsec/iast/vulnerabilities.js index 93fdd9c67c9..d885d3406b0 100644 --- a/packages/dd-trace/src/appsec/iast/vulnerabilities.js +++ b/packages/dd-trace/src/appsec/iast/vulnerabilities.js @@ -5,6 +5,7 @@ module.exports = { LDAP_INJECTION: 'LDAP_INJECTION', NO_HTTPONLY_COOKIE: 'NO_HTTPONLY_COOKIE', NO_SAMESITE_COOKIE: 'NO_SAMESITE_COOKIE', + NOSQL_MONGODB_INJECTION: 'NOSQL_MONGODB_INJECTION', PATH_TRAVERSAL: 'PATH_TRAVERSAL', SQL_INJECTION: 'SQL_INJECTION', SSRF: 'SSRF', diff --git a/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.express-mongo-sanitize.plugin.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.express-mongo-sanitize.plugin.spec.js new file mode 100644 index 00000000000..3daacbdfe78 --- /dev/null +++ b/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.express-mongo-sanitize.plugin.spec.js @@ -0,0 +1,151 @@ +'use strict' + +const { prepareTestServerForIastInExpress } = require('../utils') +const axios = require('axios') +const agent = require('../../../plugins/agent') +const path = require('path') +const os = require('os') +const fs = require('fs') +describe('nosql injection detection in mongodb - whole feature', () => { + withVersions('express', 'express', '>4.18.0', expressVersion => { + withVersions('mongodb', 'mongodb', mongodbVersion => { + const vulnerableMethodFilename = 'mongodb-vulnerable-method.js' + let collection, tmpFilePath + + before(() => { + return agent.load(['mongodb'], { client: false }, { flushInterval: 1 }) + }) + + before(async () => { + const { MongoClient } = require(`../../../../../../versions/mongodb@${mongodbVersion}`).get() + const client = new MongoClient('mongodb://127.0.0.1:27017') + await client.connect() + + const db = client.db('test') + collection = db.collection('test-collection') + + const src = path.join(__dirname, 'resources', vulnerableMethodFilename) + tmpFilePath = path.join(os.tmpdir(), vulnerableMethodFilename) + try { + fs.unlinkSync(tmpFilePath) + } catch (e) { + // ignore the error + } + fs.copyFileSync(src, tmpFilePath) + }) + + prepareTestServerForIastInExpress('Test without sanitization middlewares', expressVersion, + (testThatRequestHasVulnerability, testThatRequestHasNoVulnerability) => { + testThatRequestHasVulnerability({ + fn: async (req, res) => { + await collection.find({ + key: req.query.key + }) + res.end() + }, + vulnerability: 'NOSQL_MONGODB_INJECTION', + makeRequest: (done, config) => { + axios.get(`http://localhost:${config.port}/?key=value`).catch(done) + }, + cb: function (vulnerabilities) { + const vulnerability = vulnerabilities[0] + let someRedacted = false + vulnerability.evidence.valueParts.forEach(valuePart => { + if (valuePart.redacted) { + someRedacted = true + } + }) + + expect(someRedacted).to.be.true + } + }) + + testThatRequestHasVulnerability({ + testDescription: 'should have NOSQL_MONGODB_INJECTION vulnerability in correct file and line', + fn: async (req, res) => { + const 
filter = { + key: req.query.key + } + await require(tmpFilePath)(collection, filter) + + res.end() + }, + vulnerability: 'NOSQL_MONGODB_INJECTION', + makeRequest: (done, config) => { + axios.get(`http://localhost:${config.port}/?key=value`).catch(done) + }, + occurrences: { + occurrences: 1, + location: { + path: vulnerableMethodFilename, + line: 5 + } + } + }) + + testThatRequestHasNoVulnerability(async (req, res) => { + await collection.find({ + key: 'test' + }) + + res.end() + }, 'NOSQL_MONGODB_INJECTION') + }) + + prepareTestServerForIastInExpress('Test without sanitization middlewares and without redaction', expressVersion, + undefined, (testThatRequestHasVulnerability) => { + testThatRequestHasVulnerability({ + fn: async (req, res) => { + await collection.find({ + key: req.query.key + }) + res.end() + }, + vulnerability: 'NOSQL_MONGODB_INJECTION', + makeRequest: (done, config) => { + axios.get(`http://localhost:${config.port}/?key=value`).catch(done) + }, + cb: function (vulnerabilities) { + const vulnerability = vulnerabilities[0] + let someRedacted = false + vulnerability.evidence.valueParts.forEach(valuePart => { + if (valuePart.redacted) { + someRedacted = true + } + }) + + expect(someRedacted).to.be.false + } + }) + }, { + enabled: true, + requestSampling: 100, + maxConcurrentRequests: 100, + maxContextOperations: 100, + redactionEnabled: false + }) + + withVersions('express-mongo-sanitize', 'express-mongo-sanitize', expressMongoSanitizeVersion => { + prepareTestServerForIastInExpress('Test with sanitization middleware', expressVersion, (expressApp) => { + const mongoSanitize = + require(`../../../../../../versions/express-mongo-sanitize@${expressMongoSanitizeVersion}`).get() + expressApp.use(mongoSanitize()) + }, (testThatRequestHasVulnerability, testThatRequestHasNoVulnerability) => { + testThatRequestHasNoVulnerability({ + fn: async (req, res) => { + await collection.find({ + key: req.query.key + }) + + res.end() + }, + vulnerability: 'NOSQL_MONGODB_INJECTION', + makeRequest: (done, config) => { + axios.get(`http://localhost:${config.port}/?key=value`).catch(done) + } + }) + }) + }) + }) + }) +}) diff --git a/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.mongoose.plugin.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.mongoose.plugin.spec.js new file mode 100644 index 00000000000..624bb26617f --- /dev/null +++ b/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.mongoose.plugin.spec.js @@ -0,0 +1,139 @@ +'use strict' + +const { prepareTestServerForIastInExpress } = require('../utils') +const axios = require('axios') +const agent = require('../../../plugins/agent') +const semver = require('semver') +const os = require('os') +const path = require('path') +const fs = require('fs') + +describe('nosql injection detection in mongodb - whole feature', () => { + withVersions('express', 'express', '>4.18.0', expressVersion => { + withVersions('mongoose', 'mongoose', '>4.0.0', mongooseVersion => { + const vulnerableMethodFilename = 'mongoose-vulnerable-method.js' + let mongoose, Test, tmpFilePath + + before(() => { + return agent.load(['mongoose']) + }) + + before(async () => { + const id = require('../../../../../dd-trace/src/id') + const dbName = id().toString() + mongoose = require(`../../../../../../versions/mongoose@${mongooseVersion}`).get() + + mongoose.connect(`mongodb://localhost:27017/${dbName}`, { + useNewUrlParser: true, + useUnifiedTopology: true + }) + + Test = 
mongoose.model('Test', { name: String }) + + const src = path.join(__dirname, 'resources', vulnerableMethodFilename) + + tmpFilePath = path.join(os.tmpdir(), vulnerableMethodFilename) + try { + fs.unlinkSync(tmpFilePath) + } catch (e) { + // ignore the error + } + fs.copyFileSync(src, tmpFilePath) + }) + + after(() => { + fs.unlinkSync(tmpFilePath) + return mongoose.disconnect() + }) + + prepareTestServerForIastInExpress('Test with mongoose', expressVersion, + (testThatRequestHasVulnerability, testThatRequestHasNoVulnerability) => { + testThatRequestHasVulnerability({ + fn: async (req, res) => { + Test.find({ + name: req.query.key, + value: [1, 2, + 'value', + false, req.query.key] + }).then(() => { + res.end() + }) + }, + vulnerability: 'NOSQL_MONGODB_INJECTION', + makeRequest: (done, config) => { + axios.get(`http://localhost:${config.port}/?key=value`).catch(done) + } + }) + + testThatRequestHasVulnerability({ + fn: async (req, res) => { + Test.find({ + name: { + child: [req.query.key] + } + }).then(() => { + res.end() + }) + }, + vulnerability: 'NOSQL_MONGODB_INJECTION', + makeRequest: (done, config) => { + axios.get(`http://localhost:${config.port}/?key=value`).catch(done) + } + }) + + testThatRequestHasVulnerability({ + testDescription: 'should have NOSQL_MONGODB_INJECTION vulnerability in correct file and line', + fn: async (req, res) => { + const filter = { + name: { + child: [req.query.key] + } + } + require(tmpFilePath)(Test, filter, () => { + res.end() + }) + }, + vulnerability: 'NOSQL_MONGODB_INJECTION', + makeRequest: (done, config) => { + axios.get(`http://localhost:${config.port}/?key=value`).catch(done) + }, + occurrences: { + occurrences: 1, + location: { + path: vulnerableMethodFilename, + line: 4 + } + } + }) + + if (semver.satisfies(mongooseVersion, '>=6')) { + testThatRequestHasNoVulnerability({ + testDescription: 'should not have NOSQL_MONGODB_INJECTION vulnerability with mongoose.sanitizeFilter', + fn: async (req, res) => { + const filter = mongoose.sanitizeFilter({ + name: { + child: [req.query.key] + } + }) + Test.find(filter).then(() => { + res.end() + }) + }, + vulnerability: 'NOSQL_MONGODB_INJECTION', + makeRequest: (done, config) => { + axios.get(`http://localhost:${config.port}/?key=value`).catch(done) + } + }) + } + + testThatRequestHasNoVulnerability(async (req, res) => { + Test.find({ + name: 'test' + }).then(() => { + res.end() + }) + }, 'NOSQL_MONGODB_INJECTION') + }) + }) + }) +}) diff --git a/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.spec.js new file mode 100644 index 00000000000..8bf10fcdf70 --- /dev/null +++ b/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.spec.js @@ -0,0 +1,127 @@ +'use strict' + +const proxyquire = require('proxyquire') + +const { channel } = require('../../../../../datadog-instrumentations/src/helpers/instrument') +const { + createTransaction, + newTaintedString, + removeTransaction, + getRanges +} = require('../../../../src/appsec/iast/taint-tracking/operations') + +const sanitizeMiddlewareFinished = channel('datadog:express-mongo-sanitize:filter:finish') +const sanitizeMethodFinished = channel('datadog:express-mongo-sanitize:sanitize:finish') + +describe('nosql injection detection in mongodb', () => { + describe('SECURE_MARKS', () => { + let iastContext + const tid = 'transaction_id' + let nosqlInjectionMongodbAnalyzer, MONGODB_NOSQL_SECURE_MARK + + before(() => { + 
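        // the analyzer is loaded through proxyquire so that '../iast-context' resolves to the
        // iastContext variable controlled by this spec (reset in beforeEach) rather than the
        // real request-scoped context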
nosqlInjectionMongodbAnalyzer = + proxyquire('../../../../src/appsec/iast/analyzers/nosql-injection-mongodb-analyzer', + { + '../iast-context': { + getIastContext () { + return iastContext + } + } + }) + MONGODB_NOSQL_SECURE_MARK = nosqlInjectionMongodbAnalyzer.MONGODB_NOSQL_SECURE_MARK + }) + + beforeEach(() => { + iastContext = {} + createTransaction(tid, iastContext) + nosqlInjectionMongodbAnalyzer.configure({ enabled: true }) + }) + + afterEach(() => { + removeTransaction(iastContext) + nosqlInjectionMongodbAnalyzer.configure({ enabled: false }) + iastContext = undefined + }) + + describe('express-mongo-sanitize', () => { + describe('middleware', () => { + it('Secure mark is added', () => { + const taintedString = newTaintedString(iastContext, 'value', 'param', 'Request') + const req = { query: { param: taintedString } } + + sanitizeMiddlewareFinished.publish({ + sanitizedProperties: ['body', 'query'], + req + }) + + const sanitizedRanges = getRanges(iastContext, req.query.param) + const notSanitizedRanges = getRanges(iastContext, taintedString) + + expect(sanitizedRanges.length).to.be.equal(1) + expect(notSanitizedRanges.length).to.be.equal(1) + + expect(sanitizedRanges[0].secureMarks).to.be.equal(MONGODB_NOSQL_SECURE_MARK) + expect(notSanitizedRanges[0].secureMarks).to.be.equal(0) + }) + + it('Secure mark is added in nested objects', () => { + const taintedString = newTaintedString(iastContext, 'value', 'param', 'Request') + const req = { body: { key1: { key2: taintedString } } } + + sanitizeMiddlewareFinished.publish({ + sanitizedProperties: ['body'], + req + }) + + const sanitizedRanges = getRanges(iastContext, req.body.key1.key2) + const notSanitizedRanges = getRanges(iastContext, taintedString) + + expect(sanitizedRanges.length).to.be.equal(1) + expect(notSanitizedRanges.length).to.be.equal(1) + + expect(sanitizedRanges[0].secureMarks).to.be.equal(MONGODB_NOSQL_SECURE_MARK) + expect(notSanitizedRanges[0].secureMarks).to.be.equal(0) + }) + }) + + describe('sanitize method', () => { + it('Secure mark is added', () => { + const taintedString = newTaintedString(iastContext, 'value', 'param', 'Request') + const sanitizedObject = { param: taintedString } + + sanitizeMethodFinished.publish({ + sanitizedObject + }) + + const sanitizedRanges = getRanges(iastContext, sanitizedObject.param) + const notSanitizedRanges = getRanges(iastContext, taintedString) + + expect(sanitizedRanges.length).to.be.equal(1) + expect(notSanitizedRanges.length).to.be.equal(1) + + expect(notSanitizedRanges[0].secureMarks).to.be.equal(0) + expect(sanitizedRanges[0].secureMarks).to.be.equal(MONGODB_NOSQL_SECURE_MARK) + }) + + it('Secure mark is added in nested objects', () => { + const taintedString = newTaintedString(iastContext, 'value', 'param', 'Request') + const sanitizedObject = { key1: { key2: taintedString } } + + sanitizeMethodFinished.publish({ + sanitizedObject + }) + + const sanitizedRanges = getRanges(iastContext, sanitizedObject.key1.key2) + const notSanitizedRanges = getRanges(iastContext, taintedString) + + expect(sanitizedRanges.length).to.be.equal(1) + expect(notSanitizedRanges.length).to.be.equal(1) + + expect(sanitizedRanges[0].secureMarks).to.be.equal(MONGODB_NOSQL_SECURE_MARK) + expect(notSanitizedRanges[0].secureMarks).to.be.equal(0) + }) + }) + }) + }) +}) diff --git a/packages/dd-trace/test/appsec/iast/analyzers/resources/mongodb-vulnerable-method.js b/packages/dd-trace/test/appsec/iast/analyzers/resources/mongodb-vulnerable-method.js new file mode 100644 index 00000000000..99b0eebc938 --- 
/dev/null +++ b/packages/dd-trace/test/appsec/iast/analyzers/resources/mongodb-vulnerable-method.js @@ -0,0 +1,6 @@ +'use strict' + +module.exports = function vulnerableMethod (collection, filter) { + // comment to force a vulnerability in line 5 instead of 4 + return collection.find(filter) +} diff --git a/packages/dd-trace/test/appsec/iast/analyzers/resources/mongoose-vulnerable-method.js b/packages/dd-trace/test/appsec/iast/analyzers/resources/mongoose-vulnerable-method.js new file mode 100644 index 00000000000..129ab3e598b --- /dev/null +++ b/packages/dd-trace/test/appsec/iast/analyzers/resources/mongoose-vulnerable-method.js @@ -0,0 +1,5 @@ +'use strict' + +module.exports = function vulnerableMethod (Test, filter, cb) { + Test.find(filter).then(cb) +} diff --git a/packages/dd-trace/test/appsec/iast/taint-tracking/secure-marks-generator.spec.js b/packages/dd-trace/test/appsec/iast/taint-tracking/secure-marks-generator.spec.js new file mode 100644 index 00000000000..e5ddb8b6bbe --- /dev/null +++ b/packages/dd-trace/test/appsec/iast/taint-tracking/secure-marks-generator.spec.js @@ -0,0 +1,18 @@ +'use strict' + +const { getNextSecureMark, reset } = require('../../../../src/appsec/iast/taint-tracking/secure-marks-generator') +describe('test secure marks generator', () => { + beforeEach(() => { + reset() + }) + + after(() => { + reset() + }) + + it('should generate numbers in order', () => { + for (let i = 0; i < 100; i++) { + expect(getNextSecureMark()).to.be.equal(1 << i) + } + }) +}) diff --git a/packages/dd-trace/test/appsec/iast/utils.js b/packages/dd-trace/test/appsec/iast/utils.js index 670f7bec653..b2847d02db3 100644 --- a/packages/dd-trace/test/appsec/iast/utils.js +++ b/packages/dd-trace/test/appsec/iast/utils.js @@ -145,11 +145,12 @@ function checkNoVulnerabilityInRequest (vulnerability, config, done, makeRequest .then(done) .catch(done) if (makeRequest) { - makeRequest(done) + makeRequest(done, config) } else { axios.get(`http://localhost:${config.port}/`).catch(done) } } + function checkVulnerabilityInRequest (vulnerability, occurrencesAndLocation, cb, makeRequest, config, done) { let location let occurrences = occurrencesAndLocation @@ -200,7 +201,7 @@ function checkVulnerabilityInRequest (vulnerability, occurrencesAndLocation, cb, .then(done) .catch(done) if (makeRequest) { - makeRequest(done) + makeRequest(done, config) } else { axios.get(`http://localhost:${config.port}/`).catch(done) } @@ -269,7 +270,12 @@ function prepareTestServerForIast (description, tests, iastConfig) { }) } -function prepareTestServerForIastInExpress (description, expressVersion, tests) { +function prepareTestServerForIastInExpress (description, expressVersion, loadMiddlewares, tests, iastConfig) { + if (arguments.length === 3) { + tests = loadMiddlewares + loadMiddlewares = undefined + } + describe(description, () => { const config = {} let listener, app, server @@ -288,6 +294,9 @@ function prepareTestServerForIastInExpress (description, expressVersion, tests) const express = require(`../../../../../versions/express@${expressVersion}`).get() const bodyParser = require(`../../../../../versions/body-parser`).get() const expressApp = express() + + if (loadMiddlewares) loadMiddlewares(expressApp) + expressApp.use(bodyParser.json()) expressApp.all('/', listener) @@ -299,7 +308,7 @@ function prepareTestServerForIastInExpress (description, expressVersion, tests) }) }) - beforeEachIastTest() + beforeEachIastTest(iastConfig) afterEach(() => { iast.disable() @@ -312,17 +321,38 @@ function 
prepareTestServerForIastInExpress (description, expressVersion, tests) }) function testThatRequestHasVulnerability (fn, vulnerability, occurrences, cb, makeRequest) { - it(`should have ${vulnerability} vulnerability`, function (done) { + let testDescription = `should have ${vulnerability} vulnerability` + if (typeof fn === 'object') { + const obj = fn + fn = obj.fn + vulnerability = obj.vulnerability + occurrences = obj.occurrences + cb = obj.cb + makeRequest = obj.makeRequest + testDescription = obj.testDescription || testDescription + } + + it(testDescription, function (done) { this.timeout(5000) app = fn + checkVulnerabilityInRequest(vulnerability, occurrences, cb, makeRequest, config, done) }) } - function testThatRequestHasNoVulnerability (fn, vulnerability) { - it(`should not have ${vulnerability} vulnerability`, function (done) { + function testThatRequestHasNoVulnerability (fn, vulnerability, makeRequest) { + let testDescription = `should not have ${vulnerability} vulnerability` + if (typeof fn === 'object') { + const obj = fn + fn = obj.fn + vulnerability = obj.vulnerability + makeRequest = obj.makeRequest + testDescription = obj.testDescription || testDescription + } + + it(testDescription, function (done) { app = fn - checkNoVulnerabilityInRequest(vulnerability, config, done) + checkNoVulnerabilityInRequest(vulnerability, config, done, makeRequest) }) } diff --git a/packages/dd-trace/test/appsec/iast/vulnerability-formatter/resources/evidence-redaction-suite.json b/packages/dd-trace/test/appsec/iast/vulnerability-formatter/resources/evidence-redaction-suite.json index 89fc975a262..271013c3b5b 100644 --- a/packages/dd-trace/test/appsec/iast/vulnerability-formatter/resources/evidence-redaction-suite.json +++ b/packages/dd-trace/test/appsec/iast/vulnerability-formatter/resources/evidence-redaction-suite.json @@ -2733,6 +2733,368 @@ } ] } + }, + { + "type": "VULNERABILITIES", + "description": "Mongodb json query with sensitive source", + "input": [ + { + "type": "NOSQL_MONGODB_INJECTION", + "evidence": { + "value": { + "password": "1234" + }, + "ranges": [ + { + "start": 0, + "end": 4, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "password", + "parameterValue": "1234" + } + } + ], + "rangesToApply": { + "password": [ + { + "start": 0, + "end": 4, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "password", + "parameterValue": "1234" + } + } + ] + } + } + } + ], + "expected": { + "sources": [ + { + "origin": "http.request.parameter", + "name": "password", + "redacted": true, + "pattern": "abcd" + } + ], + "vulnerabilities": [ + { + "type": "NOSQL_MONGODB_INJECTION", + "evidence": { + "valueParts": [ + { + "value": "{\n \"password\": \"" + }, + { + "source": 0, + "redacted": true, + "pattern": "abcd" + }, + { + "value": "\"\n}" + } + ] + } + } + ] + } + }, + { + "type": "VULNERABILITIES", + "description": "Mongodb json query with non sensitive source", + "input": [ + { + "type": "NOSQL_MONGODB_INJECTION", + "evidence": { + "value": { + "username": "user" + }, + "ranges": [ + { + "start": 0, + "end": 4, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "username", + "parameterValue": "user" + } + } + ], + "rangesToApply": { + "username": [ + { + "start": 0, + "end": 4, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "username", + "parameterValue": "user" + } + } + ] + } + } + } + ], + "expected": { + "sources": [ + { + "origin": "http.request.parameter", + "name": "username", + "redacted": true, + 
"pattern": "abcd" + } + ], + "vulnerabilities": [ + { + "type": "NOSQL_MONGODB_INJECTION", + "evidence": { + "valueParts": [ + { + "value": "{\n \"username\": \"" + }, + { + "source": 0, + "redacted": true, + "pattern": "abcd" + }, + { + "value": "\"\n}" + } + ] + } + } + ] + } + }, + { + "type": "VULNERABILITIES", + "description": "Mongodb json query with partial non sensitive source", + "input": [ + { + "type": "NOSQL_MONGODB_INJECTION", + "evidence": { + "value": { + "username": "user" + }, + "ranges": [ + { + "start": 0, + "end": 4, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "username", + "parameterValue": "PREFIX_user" + } + } + ], + "rangesToApply": { + "username": [ + { + "start": 0, + "end": 4, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "username", + "parameterValue": "PREFIX_user" + } + } + ] + } + } + } + ], + "expected": { + "sources": [ + { + "origin": "http.request.parameter", + "name": "username", + "redacted": true, + "pattern": "abcdefghijk" + } + ], + "vulnerabilities": [ + { + "type": "NOSQL_MONGODB_INJECTION", + "evidence": { + "valueParts": [ + { + "value": "{\n \"username\": \"" + }, + { + "source": 0, + "redacted": true, + "pattern": "hijk" + }, + { + "value": "\"\n}" + } + ] + } + } + ] + } + }, + { + "type": "VULNERABILITIES", + "description": "Mongodb json query with non sensitive source and other fields", + "input": [ + { + "type": "NOSQL_MONGODB_INJECTION", + "evidence": { + "value": { + "username": "user", + "secret": "SECRET_VALUE" + }, + "ranges": [ + { + "start": 0, + "end": 4, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "username", + "parameterValue": "user" + } + } + ], + "rangesToApply": { + "username": [ + { + "start": 0, + "end": 4, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "username", + "parameterValue": "user" + } + } + ] + } + } + } + ], + "expected": { + "sources": [ + { + "origin": "http.request.parameter", + "name": "username", + "redacted": true, + "pattern": "abcd" + } + ], + "vulnerabilities": [ + { + "type": "NOSQL_MONGODB_INJECTION", + "evidence": { + "valueParts": [ + { + "value": "{\n \"username\": \"" + }, + { + "source": 0, + "redacted": true, + "pattern": "abcd" + }, + { + "value": "\",\n \"secret\": \"" + }, + { + "redacted": true + }, + { + "value": "\"\n}" + } + ] + } + } + ] + } + }, + { + "type": "VULNERABILITIES", + "description": "Mongodb json query with sensitive value in a key", + "input": [ + { + "type": "NOSQL_MONGODB_INJECTION", + "evidence": { + "value": { + "username": "user", + "token_usage": { + "bearer zss8dR9QP81A": 10 + } + }, + "ranges": [ + { + "start": 0, + "end": 4, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "username", + "parameterValue": "user" + } + } + ], + "rangesToApply": { + "username": [ + { + "start": 0, + "end": 4, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "username", + "parameterValue": "user" + } + } + ] + } + } + } + ], + "expected": { + "sources": [ + { + "origin": "http.request.parameter", + "name": "username", + "redacted": true, + "pattern": "abcd" + } + ], + "vulnerabilities": [ + { + "type": "NOSQL_MONGODB_INJECTION", + "evidence": { + "valueParts": [ + { + "value": "{\n \"username\": \"" + }, + { + "source": 0, + "redacted": true, + "pattern": "abcd" + }, + { + "value": "\",\n \"token_usage\": {\n \"" + }, + { + "redacted": true + }, + { + "value": "\": " + }, + { + "redacted": true + }, + { + "value": "\n }\n}" + } + ] + } + } + ] + } } ] } diff 
--git a/packages/dd-trace/test/appsec/iast/vulnerability-formatter/utils.spec.js b/packages/dd-trace/test/appsec/iast/vulnerability-formatter/utils.spec.js new file mode 100644 index 00000000000..f37696f6d80 --- /dev/null +++ b/packages/dd-trace/test/appsec/iast/vulnerability-formatter/utils.spec.js @@ -0,0 +1,342 @@ +'use strict' + +const { stringifyWithRanges } = require('../../../../src/appsec/iast/vulnerabilities-formatter/utils') +describe('test vulnerabiilty-formatter utils', () => { + describe('stringifyWithRanges', () => { + describe('loadSensitiveRanges = false', () => { + it('Undefined ranges', () => { + const src = { key: 'value' } + const { value, ranges } = stringifyWithRanges(src) + + expect(value).to.be.equal(JSON.stringify(src, null, 2)) + expect(ranges.length).to.be.equal(0) + }) + + it('Empty ranges', () => { + const src = { key: 'value' } + const { value, ranges } = stringifyWithRanges(src, {}) + + expect(value).to.be.equal(JSON.stringify(src, null, 2)) + expect(ranges.length).to.be.equal(0) + }) + + it('Range in first level', () => { + const src = { key: 'value' } + const iinfo = { type: 'TEST' } + const range = { + start: 0, end: 5, iinfo + } + + const { value, ranges } = stringifyWithRanges(src, { key: [range] }) + + expect(value).to.be.equal(JSON.stringify(src, null, 2)) + + expect(ranges).to.be.deep.equal([ + { start: 12, end: 17, iinfo } + ]) + }) + + it('Range in first level and other in nested level', () => { + const src = { + key: 'value', + withChildren: { + notTainted: 'not tainted', + partiallyTainted: 'nnnYYYYYnnnYYY' // YYYYY is tainted + } + } + const iinfo = { type: 'TEST' } + const firstRanges = [{ + start: 0, end: 5, iinfo + }] + const secondRanges = [ + { start: 3, end: 8, iinfo }, + { start: 11, end: 14, iinfo } + ] + + const { value, ranges } = stringifyWithRanges(src, { + key: firstRanges, + 'withChildren.partiallyTainted': secondRanges + }) + + expect(value).to.be.equal(JSON.stringify(src, null, 2)) + + expect(ranges).to.be.deep.equal([ + { start: 12, end: 17, iinfo }, + { start: 101, end: 106, iinfo }, + { start: 109, end: 112, iinfo } + ]) + }) + + it('Empty ranges in array', () => { + const src = [{ key: ['value'] }] + const { value, ranges } = stringifyWithRanges(src, {}) + + expect(value).to.be.equal(JSON.stringify(src, null, 2)) + expect(ranges.length).to.be.equal(0) + }) + + it('Range in array item', () => { + const src = ['value'] + const iinfo = { type: 'TEST' } + const range = { + start: 0, end: 5, iinfo + } + const { value, ranges } = stringifyWithRanges(src, { '0': [range] }) + + expect(value).to.be.equal(JSON.stringify(src, null, 2)) + + expect(ranges).to.be.deep.equal([ + { start: 5, end: 10, iinfo } + ]) + }) + + it('Ranges in array and nested objects', () => { + const src = [ + { + key: 'tainted1' + }, + 'tainted2', + 'nnnYYYYnYYn', + { + key2: [{ + key21: 'nnYYn', + key22: 'nottainted' + }] + }, + 'nottainted' + ] + const iinfo = { type: 'TEST' } + const objRanges = { + '0.key': [{ start: 0, end: 8, iinfo }], + '1': [{ start: 0, end: 8, iinfo }], + '2': [ + { start: 3, end: 7, iinfo }, + { start: 8, end: 10, iinfo } + ], + '3.key2.0.key21': [{ start: 2, end: 4, iinfo }] + } + + const { value, ranges } = stringifyWithRanges(src, objRanges) + + expect(value).to.be.equal(JSON.stringify(src, null, 2)) + + expect(ranges).to.be.deep.equal([ + { start: 18, end: 26, iinfo }, + { start: 36, end: 44, iinfo }, + { start: 53, end: 57, iinfo }, + { start: 58, end: 60, iinfo }, + { start: 110, end: 112, iinfo } + ]) + }) + }) + + 
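    // --- Editor's aside: illustrative sketch, not part of this patch -------------------------
    // Minimal standalone usage of stringifyWithRanges in sensitive mode, mirroring the require
    // path used at the top of this spec; the sample object is an assumption made up here.
    const { stringifyWithRanges: sketchStringify } =
      require('../../../../src/appsec/iast/vulnerabilities-formatter/utils')
    const sketch = sketchStringify({ user: 'bob' }, null, true)
    // sketch.value equals JSON.stringify({ user: 'bob' }, null, 2); sketch.ranges stays empty
    // because no taint ranges were passed, and sketch.sensitiveRanges holds a single entry with
    // the start/end offsets of 'bob' inside that pretty-printed JSON string.
    // ------------------------------------------------------------------------------------------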
describe('loadSensitiveRanges = true', () => { + it('Undefined ranges', () => { + const src = { key: 'value', numberKey: 123, nullKey: null, falseKey: false, trueKey: true } + + const { value, ranges, sensitiveRanges } = stringifyWithRanges(src, null, true) + + expect(value).to.be.equal(JSON.stringify(src, null, 2)) + expect(ranges.length).to.be.equal(0) + + expect(sensitiveRanges).to.be.deep.equal([ + { start: 12, end: 17 }, + { start: 35, end: 38 }, + { start: 53, end: 57 }, + { start: 73, end: 78 }, + { start: 93, end: 97 } + ]) + }) + + it('Range in first level', () => { + const src = { key: 'value' } + const iinfo = { type: 'TEST' } + const range = { + start: 0, end: 5, iinfo + } + + const { value, ranges, sensitiveRanges } = stringifyWithRanges(src, { key: [range] }, true) + + expect(value).to.be.equal(JSON.stringify(src, null, 2)) + + expect(ranges).to.be.deep.equal([ + { start: 12, end: 17, iinfo } + ]) + + expect(sensitiveRanges).to.be.deep.equal([ + { start: 12, end: 17 } + ]) + }) + + it('Sensitive key', () => { + const src = { 'bearer zss8dR9QP81A': 'value' } + + const { value, ranges, sensitiveRanges } = stringifyWithRanges(src, {}, true) + + expect(value).to.be.equal(JSON.stringify(src, null, 2)) + expect(ranges.length).to.be.equal(0) + + expect(sensitiveRanges).to.be.deep.equal([ + { start: 5, end: 24 }, + { start: 28, end: 33 } + ]) + }) + + it('Range in first level and other in nested level', () => { + const src = { + key: 'value', + withChildren: { + notTainted: 'not tainted', + partiallyTainted: 'nnnYYYYYnnnYYY' // YYYYY is tainted + } + } + const iinfo = { type: 'TEST' } + const firstRanges = [{ + start: 0, end: 5, iinfo + }] + const secondRanges = [ + { start: 3, end: 8, iinfo }, + { start: 11, end: 14, iinfo } + ] + + const { value, ranges, sensitiveRanges } = stringifyWithRanges(src, { + key: firstRanges, + 'withChildren.partiallyTainted': secondRanges + }, true) + + expect(value).to.be.equal(JSON.stringify(src, null, 2)) + + expect(ranges).to.be.deep.equal([ + { start: 12, end: 17, iinfo }, + { start: 101, end: 106, iinfo }, + { start: 109, end: 112, iinfo } + ]) + + expect(sensitiveRanges).to.be.deep.equal([ + { start: 12, end: 17 }, + { start: 59, end: 70 }, + { start: 98, end: 112 } + ]) + }) + + it('Range in first level, other in nested level and sensitive key', () => { + const src = { + key: 'value', + withChildren: { + 'bearer zss8dR9QP81A': 'not tainted', + partiallyTainted: 'nnnYYYYYnnnYYY' // YYYYY is tainted + } + } + const iinfo = { type: 'TEST' } + const firstRanges = [{ + start: 0, end: 5, iinfo + }] + const secondRanges = [ + { start: 3, end: 8, iinfo }, + { start: 11, end: 14, iinfo } + ] + + const { value, ranges, sensitiveRanges } = stringifyWithRanges(src, { + key: firstRanges, + 'withChildren.partiallyTainted': secondRanges + }, true) + + expect(value).to.be.equal(JSON.stringify(src, null, 2)) + + expect(ranges).to.be.deep.equal([ + { start: 12, end: 17, iinfo }, + { start: 110, end: 115, iinfo }, + { start: 118, end: 121, iinfo } + ]) + + expect(sensitiveRanges).to.be.deep.equal([ + { start: 12, end: 17 }, + { start: 45, end: 64 }, + { start: 68, end: 79 }, + { start: 107, end: 121 } + ]) + }) + + it('Empty ranges in array', () => { + const src = [{ key: ['value'] }] + + const { value, ranges, sensitiveRanges } = stringifyWithRanges(src, {}, true) + + expect(value).to.be.equal(JSON.stringify(src, null, 2)) + expect(ranges.length).to.be.equal(0) + + expect(sensitiveRanges).to.be.deep.equal([ + { start: 26, end: 31 } + ]) + }) + + it('Range in 
array item', () => { + const src = ['value'] + const iinfo = { type: 'TEST' } + const range = { + start: 0, end: 5, iinfo + } + + const { value, ranges, sensitiveRanges } = stringifyWithRanges(src, { '0': [range] }, true) + + expect(value).to.be.equal(JSON.stringify(src, null, 2)) + + expect(ranges).to.be.deep.equal([ + { start: 5, end: 10, iinfo } + ]) + + expect(sensitiveRanges).to.be.deep.equal([ + { start: 5, end: 10 } + ]) + }) + + it('Ranges in array and nested objects', () => { + const src = [ + { + key: 'tainted1' + }, + 'tainted2', + 'nnnYYYYnYYn', + { + key2: [{ + key21: 'nnYYn', + key22: 'nottainted' + }] + }, + 'nottainted' + ] + const iinfo = { type: 'TEST' } + const objRanges = { + '0.key': [{ start: 0, end: 8, iinfo }], + '1': [{ start: 0, end: 8, iinfo }], + '2': [ + { start: 3, end: 7, iinfo }, + { start: 8, end: 10, iinfo } + ], + '3.key2.0.key21': [{ start: 2, end: 4, iinfo }] + } + + const { value, ranges, sensitiveRanges } = stringifyWithRanges(src, objRanges, true) + + expect(value).to.be.equal(JSON.stringify(src, null, 2)) + + expect(ranges).to.be.deep.equal([ + { start: 18, end: 26, iinfo }, + { start: 36, end: 44, iinfo }, + { start: 53, end: 57, iinfo }, + { start: 58, end: 60, iinfo }, + { start: 110, end: 112, iinfo } + ]) + + expect(sensitiveRanges).to.be.deep.equal([ + { start: 18, end: 26 }, + { start: 36, end: 44 }, + { start: 50, end: 61 }, + { start: 108, end: 113 }, + { start: 134, end: 144 }, + { start: 168, end: 178 } + ]) + }) + }) + }) +}) diff --git a/packages/dd-trace/test/plugins/externals.json b/packages/dd-trace/test/plugins/externals.json index 9cab90ccfc3..c7707705934 100644 --- a/packages/dd-trace/test/plugins/externals.json +++ b/packages/dd-trace/test/plugins/externals.json @@ -41,6 +41,24 @@ "versions": [">=1.4.6"] } ], + "express-mongo-sanitize": [ + { + "name": "mongodb", + "versions": [">=3.3"] + }, + { + "name": "mongodb-core", + "versions": ["3.2.7"] + }, + { + "name": "express", + "versions": [">=4", ">=4.0.0 <4.3.0", ">=4.3.0"] + }, + { + "name": "body-parser", + "versions": ["1.20.1"] + } + ], "fastify": [ { "name": "fastify", @@ -177,6 +195,14 @@ { "name": "mongodb-core", "versions": ["3.2.7"] + }, + { + "name": "express", + "versions": [">=4", ">=4.0.0 <4.3.0", ">=4.3.0"] + }, + { + "name": "body-parser", + "versions": ["1.20.1"] } ], "next": [ @@ -200,15 +226,14 @@ } ], "passport-local": [ - { - "name": "passport", - "versions": [">=0.4.1"] - }, + { + "name": "passport", + "versions": [">=0.4.1"] + }, { "name": "express", "versions": [">=4.16.2"] } - ], "pg": [ { diff --git a/yarn.lock b/yarn.lock index 96918ac9a74..6279483fb3f 100644 --- a/yarn.lock +++ b/yarn.lock @@ -400,10 +400,10 @@ lru-cache "^7.14.0" node-gyp-build "^4.5.0" -"@datadog/native-iast-taint-tracking@1.5.0": - version "1.5.0" - resolved "https://registry.yarnpkg.com/@datadog/native-iast-taint-tracking/-/native-iast-taint-tracking-1.5.0.tgz#1a55eca6692079ac6167696682acb972aa0b0181" - integrity sha512-SOWIk1M6PZH0osNB191Voz2rKBPoF5hISWVSK9GiJPrD40+xjib1Z/bFDV7EkDn3kjOyordSBdNPG5zOqZJdyg== +"@datadog/native-iast-taint-tracking@1.6.1": + version "1.6.1" + resolved "https://registry.yarnpkg.com/@datadog/native-iast-taint-tracking/-/native-iast-taint-tracking-1.6.1.tgz#fcf2f376797dbfc368d6cb3636b922372d2be50e" + integrity sha512-V1X0UbEROcEkqP4IIovqK9uu8jPXq80m8xOW1Vb6xJ9otO3eBphvDFDSa/OJ4pEYhajjjmGlraLlV6rXjaSGlQ== dependencies: node-gyp-build "^3.9.0" From e6d4c8b517e45aa8c025096872f27d1f992e2d26 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Fri, 6 Oct 2023 16:42:34 +0200 Subject: [PATCH 013/147] [ci-visibility] Unskippable tests for mocha (#3661) --- integration-tests/ci-visibility.spec.js | 241 ++++++++++-------- integration-tests/ci-visibility/run-jest.js | 2 +- integration-tests/ci-visibility/run-jest.mjs | 2 +- integration-tests/ci-visibility/run-mocha.js | 11 +- integration-tests/ci-visibility/run-mocha.mjs | 13 +- packages/datadog-instrumentations/src/jest.js | 31 ++- .../datadog-instrumentations/src/mocha.js | 23 +- packages/datadog-plugin-jest/src/index.js | 8 +- packages/datadog-plugin-jest/src/util.js | 6 +- .../datadog-plugin-jest/test/util.spec.js | 49 ++++ packages/datadog-plugin-mocha/src/index.js | 25 +- packages/dd-trace/src/plugins/util/test.js | 13 +- 12 files changed, 282 insertions(+), 142 deletions(-) diff --git a/integration-tests/ci-visibility.spec.js b/integration-tests/ci-visibility.spec.js index 068cd52705c..4ff2ec1e0ec 100644 --- a/integration-tests/ci-visibility.spec.js +++ b/integration-tests/ci-visibility.spec.js @@ -30,11 +30,13 @@ const { const hookFile = 'dd-trace/loader-hook.mjs' const mochaCommonOptions = { + name: 'mocha', expectedStdout: '2 passing', extraStdout: 'end event: can add event listeners to mocha' } const jestCommonOptions = { + name: 'jest', dependencies: ['jest', 'chai', 'jest-jasmine2'], expectedStdout: 'Test Suites: 2 passed', expectedCoverageFiles: [ @@ -47,7 +49,6 @@ const jestCommonOptions = { const testFrameworks = [ { ...mochaCommonOptions, - name: 'mocha', testFile: 'ci-visibility/run-mocha.js', dependencies: ['mocha', 'chai', 'nyc'], expectedCoverageFiles: [ @@ -57,12 +58,10 @@ const testFrameworks = [ 'ci-visibility/test/ci-visibility-test-2.js' ], runTestsWithCoverageCommand: './node_modules/nyc/bin/nyc.js -r=text-summary node ./ci-visibility/run-mocha.js', - coverageMessage: 'Lines : 80%', type: 'commonJS' }, { ...mochaCommonOptions, - name: 'mocha', testFile: 'ci-visibility/run-mocha.mjs', dependencies: ['mocha', 'chai', 'nyc', '@istanbuljs/esm-loader-hook'], expectedCoverageFiles: [ @@ -75,19 +74,16 @@ const testFrameworks = [ `./node_modules/nyc/bin/nyc.js -r=text-summary ` + `node --loader=./node_modules/@istanbuljs/esm-loader-hook/index.js ` + `--loader=${hookFile} ./ci-visibility/run-mocha.mjs`, - coverageMessage: 'Lines : 78.57%', type: 'esm' }, { ...jestCommonOptions, - name: 'jest', testFile: 'ci-visibility/run-jest.js', runTestsWithCoverageCommand: 'node ./ci-visibility/run-jest.js', type: 'commonJS' }, { ...jestCommonOptions, - name: 'jest', testFile: 'ci-visibility/run-jest.mjs', runTestsWithCoverageCommand: `node --loader=${hookFile} ./ci-visibility/run-jest.mjs`, type: 'esm' @@ -102,7 +98,6 @@ testFrameworks.forEach(({ extraStdout, expectedCoverageFiles, runTestsWithCoverageCommand, - coverageMessage, type }) => { // temporary fix for failing esm tests on the CI, skip for now for the release and comeback to solve the issue @@ -248,7 +243,7 @@ testFrameworks.forEach(({ cwd, env: { ...getCiVisAgentlessConfig(receiver.port), - TEST_REGEX: 'sharding-test/sharding-test', + TESTS_TO_RUN: 'sharding-test/sharding-test', TEST_SHARD: '2/2' }, stdio: 'inherit' @@ -284,7 +279,7 @@ testFrameworks.forEach(({ cwd, env: { ...getCiVisAgentlessConfig(receiver.port), - TEST_REGEX: 'sharding-test/sharding-test', + TESTS_TO_RUN: 'sharding-test/sharding-test', TEST_SHARD: '1/2' }, stdio: 'inherit' @@ -403,7 +398,7 @@ testFrameworks.forEach(({ ...getCiVisAgentlessConfig(receiver.port), NODE_OPTIONS: '-r 
dd-trace/ci/init', RUN_IN_PARALLEL: true, - TEST_REGEX: 'timeout-test/timeout-test.js' + TESTS_TO_RUN: 'timeout-test/timeout-test.js' }, stdio: 'pipe' }) @@ -632,8 +627,9 @@ testFrameworks.forEach(({ testOutput += chunk.toString() }) childProcess.on('exit', () => { - if (coverageMessage) { - assert.include(testOutput, coverageMessage) + // coverage report + if (name === 'mocha') { + assert.include(testOutput, 'Lines ') } done() }) @@ -814,118 +810,143 @@ testFrameworks.forEach(({ } ) }) - // TODO: remove conditional when support for mocha is done - if (name === 'jest') { - it('does not skip suites if suite is marked as unskippable', (done) => { - receiver.setSuitesToSkip([ - { - type: 'suite', - attributes: { - suite: 'ci-visibility/unskippable-test/test-to-skip.js' - } - }, - { - type: 'suite', - attributes: { - suite: 'ci-visibility/unskippable-test/test-unskippable.js' - } + it('does not skip suites if suite is marked as unskippable', (done) => { + receiver.setSuitesToSkip([ + { + type: 'suite', + attributes: { + suite: 'ci-visibility/unskippable-test/test-to-skip.js' } - ]) + }, + { + type: 'suite', + attributes: { + suite: 'ci-visibility/unskippable-test/test-unskippable.js' + } + } + ]) - const eventsPromise = receiver - .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { - const suites = payloads - .flatMap(({ payload }) => payload.events) - .filter(event => event.type === 'test_suite_end') + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const suites = events.filter(event => event.type === 'test_suite_end') - assert.equal(suites.length, 2) + assert.equal(suites.length, 2) - const skippedSuite = suites.find( - event => event.content.resource === 'test_suite.ci-visibility/unskippable-test/test-to-skip.js' - ) - const forcedToRunSuite = suites.find( - event => event.content.resource === 'test_suite.ci-visibility/unskippable-test/test-unskippable.js' - ) + const testSession = events.find(event => event.type === 'test_session_end').content + const testModule = events.find(event => event.type === 'test_module_end').content + assert.propertyVal(testSession.meta, TEST_ITR_FORCED_RUN, 'true') + assert.propertyVal(testSession.meta, TEST_ITR_UNSKIPPABLE, 'true') + assert.propertyVal(testModule.meta, TEST_ITR_FORCED_RUN, 'true') + assert.propertyVal(testModule.meta, TEST_ITR_UNSKIPPABLE, 'true') - assert.propertyVal(skippedSuite.content.meta, TEST_STATUS, 'skip') - assert.notProperty(skippedSuite.content.meta, TEST_ITR_UNSKIPPABLE) - assert.notProperty(skippedSuite.content.meta, TEST_ITR_FORCED_RUN) + const skippedSuite = suites.find( + event => event.content.resource === 'test_suite.ci-visibility/unskippable-test/test-to-skip.js' + ) + const forcedToRunSuite = suites.find( + event => event.content.resource === 'test_suite.ci-visibility/unskippable-test/test-unskippable.js' + ) - assert.propertyVal(forcedToRunSuite.content.meta, TEST_STATUS, 'pass') - assert.propertyVal(forcedToRunSuite.content.meta, TEST_ITR_UNSKIPPABLE, 'true') - assert.propertyVal(forcedToRunSuite.content.meta, TEST_ITR_FORCED_RUN, 'true') - }, 25000) + assert.propertyVal(skippedSuite.content.meta, TEST_STATUS, 'skip') + assert.notProperty(skippedSuite.content.meta, TEST_ITR_UNSKIPPABLE) + assert.notProperty(skippedSuite.content.meta, TEST_ITR_FORCED_RUN) - childProcess = exec( - runTestsWithCoverageCommand, - { - cwd, - env: { - 
...getCiVisAgentlessConfig(receiver.port), - TEST_REGEX: 'unskippable-test/test-' - }, - stdio: 'inherit' - } - ) + assert.propertyVal(forcedToRunSuite.content.meta, TEST_STATUS, 'pass') + assert.propertyVal(forcedToRunSuite.content.meta, TEST_ITR_UNSKIPPABLE, 'true') + assert.propertyVal(forcedToRunSuite.content.meta, TEST_ITR_FORCED_RUN, 'true') + }, 25000) - childProcess.on('exit', () => { - eventsPromise.then(() => { - done() - }).catch(done) - }) - }) - it('only sets forced to run if suite was going to be skipped by ITR', (done) => { - receiver.setSuitesToSkip([ - { - type: 'suite', - attributes: { - suite: 'ci-visibility/unskippable-test/test-to-skip.js' - } - } + let TESTS_TO_RUN = 'unskippable-test/test-' + if (name === 'mocha') { + TESTS_TO_RUN = JSON.stringify([ + './unskippable-test/test-to-skip.js', + './unskippable-test/test-unskippable.js' ]) + } - const eventsPromise = receiver - .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { - const suites = payloads - .flatMap(({ payload }) => payload.events) - .filter(event => event.type === 'test_suite_end') + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + TESTS_TO_RUN + }, + stdio: 'inherit' + } + ) - assert.equal(suites.length, 2) + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) + it('only sets forced to run if suite was going to be skipped by ITR', (done) => { + receiver.setSuitesToSkip([ + { + type: 'suite', + attributes: { + suite: 'ci-visibility/unskippable-test/test-to-skip.js' + } + } + ]) - const skippedSuite = suites.find( - event => event.content.resource === 'test_suite.ci-visibility/unskippable-test/test-to-skip.js' - ) - const nonSkippedSuite = suites.find( - event => event.content.resource === 'test_suite.ci-visibility/unskippable-test/test-unskippable.js' - ) + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const suites = events.filter(event => event.type === 'test_suite_end') - assert.propertyVal(skippedSuite.content.meta, TEST_STATUS, 'skip') + assert.equal(suites.length, 2) - assert.propertyVal(nonSkippedSuite.content.meta, TEST_STATUS, 'pass') - assert.propertyVal(nonSkippedSuite.content.meta, TEST_ITR_UNSKIPPABLE, 'true') - // it was not forced to run because it wasn't going to be skipped - assert.notProperty(nonSkippedSuite.content.meta, TEST_ITR_FORCED_RUN) - }, 25000) + const testSession = events.find(event => event.type === 'test_session_end').content + const testModule = events.find(event => event.type === 'test_module_end').content + assert.notProperty(testSession.meta, TEST_ITR_FORCED_RUN) + assert.propertyVal(testSession.meta, TEST_ITR_UNSKIPPABLE, 'true') + assert.notProperty(testModule.meta, TEST_ITR_FORCED_RUN) + assert.propertyVal(testModule.meta, TEST_ITR_UNSKIPPABLE, 'true') + + const skippedSuite = suites.find( + event => event.content.resource === 'test_suite.ci-visibility/unskippable-test/test-to-skip.js' + ).content + const nonSkippedSuite = suites.find( + event => event.content.resource === 'test_suite.ci-visibility/unskippable-test/test-unskippable.js' + ).content + + assert.propertyVal(skippedSuite.meta, TEST_STATUS, 'skip') + + assert.propertyVal(nonSkippedSuite.meta, TEST_STATUS, 'pass') + assert.propertyVal(nonSkippedSuite.meta, TEST_ITR_UNSKIPPABLE, 'true') + // it was not forced to 
run because it wasn't going to be skipped + assert.notProperty(nonSkippedSuite.meta, TEST_ITR_FORCED_RUN) + }, 25000) - childProcess = exec( - runTestsWithCoverageCommand, - { - cwd, - env: { - ...getCiVisAgentlessConfig(receiver.port), - TEST_REGEX: 'unskippable-test/test-' - }, - stdio: 'inherit' - } - ) + let TESTS_TO_RUN = 'unskippable-test/test-' + if (name === 'mocha') { + TESTS_TO_RUN = JSON.stringify([ + './unskippable-test/test-to-skip.js', + './unskippable-test/test-unskippable.js' + ]) + } - childProcess.on('exit', () => { - eventsPromise.then(() => { - done() - }).catch(done) - }) + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + TESTS_TO_RUN + }, + stdio: 'inherit' + } + ) + + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) }) - } + }) it('sets _dd.ci.itr.tests_skipped to false if the received suite is not skipped', (done) => { receiver.setSuitesToSkip([{ type: 'suite', @@ -1099,9 +1120,9 @@ testFrameworks.forEach(({ testOutput += chunk.toString() }) childProcess.on('exit', () => { - // check that reported coverage is still the same - if (coverageMessage) { - assert.include(testOutput, coverageMessage) + // coverage report + if (name === 'mocha') { + assert.include(testOutput, 'Lines ') } done() }) diff --git a/integration-tests/ci-visibility/run-jest.js b/integration-tests/ci-visibility/run-jest.js index be8c288ed89..822c132f1bc 100644 --- a/integration-tests/ci-visibility/run-jest.js +++ b/integration-tests/ci-visibility/run-jest.js @@ -4,7 +4,7 @@ const options = { projects: [__dirname], testPathIgnorePatterns: ['/node_modules/'], cache: false, - testRegex: process.env.TEST_REGEX ? new RegExp(process.env.TEST_REGEX) : /test\/ci-visibility-test/, + testRegex: process.env.TESTS_TO_RUN ? new RegExp(process.env.TESTS_TO_RUN) : /test\/ci-visibility-test/, coverage: true, runInBand: true, shard: process.env.TEST_SHARD || undefined diff --git a/integration-tests/ci-visibility/run-jest.mjs b/integration-tests/ci-visibility/run-jest.mjs index 4fdc690ff37..3bd90cb91ca 100644 --- a/integration-tests/ci-visibility/run-jest.mjs +++ b/integration-tests/ci-visibility/run-jest.mjs @@ -7,7 +7,7 @@ const options = { projects: [__dirname], testPathIgnorePatterns: ['/node_modules/'], cache: false, - testRegex: process.env.TEST_REGEX ? new RegExp(process.env.TEST_REGEX) : /test\/ci-visibility-test/, + testRegex: process.env.TESTS_TO_RUN ? 
new RegExp(process.env.TESTS_TO_RUN) : /test\/ci-visibility-test/, coverage: true, runInBand: true, shard: process.env.TEST_SHARD || undefined diff --git a/integration-tests/ci-visibility/run-mocha.js b/integration-tests/ci-visibility/run-mocha.js index 728ea799210..fc767f4051f 100644 --- a/integration-tests/ci-visibility/run-mocha.js +++ b/integration-tests/ci-visibility/run-mocha.js @@ -3,8 +3,15 @@ const Mocha = require('mocha') const mocha = new Mocha({ parallel: !!process.env.RUN_IN_PARALLEL }) -mocha.addFile(require.resolve('./test/ci-visibility-test.js')) -mocha.addFile(require.resolve('./test/ci-visibility-test-2.js')) +if (process.env.TESTS_TO_RUN) { + const tests = JSON.parse(process.env.TESTS_TO_RUN) + tests.forEach(test => { + mocha.addFile(require.resolve(test)) + }) +} else { + mocha.addFile(require.resolve('./test/ci-visibility-test.js')) + mocha.addFile(require.resolve('./test/ci-visibility-test-2.js')) +} mocha.run(() => { if (process.send) { process.send('finished') diff --git a/integration-tests/ci-visibility/run-mocha.mjs b/integration-tests/ci-visibility/run-mocha.mjs index 4f1015cf9e3..f4473e262f3 100644 --- a/integration-tests/ci-visibility/run-mocha.mjs +++ b/integration-tests/ci-visibility/run-mocha.mjs @@ -4,8 +4,17 @@ import { fileURLToPath } from 'url' const mocha = new Mocha({ parallel: !!process.env.RUN_IN_PARALLEL }) -mocha.addFile(fileURLToPath(new URL('./test/ci-visibility-test.js', import.meta.url))) -mocha.addFile(fileURLToPath(new URL('./test/ci-visibility-test-2.js', import.meta.url))) + +if (process.env.TESTS_TO_RUN) { + const tests = JSON.parse(process.env.TESTS_TO_RUN) + tests.forEach(test => { + mocha.addFile(fileURLToPath(new URL(test), import.meta.url)) + }) +} else { + mocha.addFile(fileURLToPath(new URL('./test/ci-visibility-test.js', import.meta.url))) + mocha.addFile(fileURLToPath(new URL('./test/ci-visibility-test-2.js', import.meta.url))) +} + mocha.run(() => { if (process.send) { process.send('finished') diff --git a/packages/datadog-instrumentations/src/jest.js b/packages/datadog-instrumentations/src/jest.js index 2162fb42255..c8e5ee60c52 100644 --- a/packages/datadog-instrumentations/src/jest.js +++ b/packages/datadog-instrumentations/src/jest.js @@ -46,6 +46,8 @@ let isCodeCoverageEnabled = false let isSuitesSkippingEnabled = false let isSuitesSkipped = false let numSkippedSuites = 0 +let hasUnskippableSuites = false +let hasForcedToRunSuites = false const sessionAsyncResource = new AsyncResource('bound-anonymous-fn') @@ -205,15 +207,17 @@ addHook({ const [test] = shardedTests const rootDir = test && test.context && test.context.config && test.context.config.rootDir - const { skippedSuites, suitesToRun } = getJestSuitesToRun(skippableSuites, shardedTests, rootDir || process.cwd()) + const jestSuitesToRun = getJestSuitesToRun(skippableSuites, shardedTests, rootDir || process.cwd()) + hasUnskippableSuites = jestSuitesToRun.hasUnskippableSuites + hasForcedToRunSuites = jestSuitesToRun.hasForcedToRunSuites - isSuitesSkipped = suitesToRun.length !== shardedTests.length - numSkippedSuites = skippedSuites.length + isSuitesSkipped = jestSuitesToRun.suitesToRun.length !== shardedTests.length + numSkippedSuites = jestSuitesToRun.skippedSuites.length - itrSkippedSuitesCh.publish({ skippedSuites, frameworkVersion }) + itrSkippedSuitesCh.publish({ skippedSuites: jestSuitesToRun.skippedSuites, frameworkVersion }) skippableSuites = [] - return suitesToRun + return jestSuitesToRun.suitesToRun }) return sequencerPackage }) @@ -285,7 +289,9 @@ function 
cliWrapper (cli, jestVersion) { isSuitesSkippingEnabled, isCodeCoverageEnabled, testCodeCoverageLinesTotal, - numSkippedSuites + numSkippedSuites, + hasUnskippableSuites, + hasForcedToRunSuites }) }) @@ -500,16 +506,19 @@ addHook({ const testPaths = await getTestPaths.apply(this, arguments) const { tests } = testPaths - const { skippedSuites, suitesToRun } = getJestSuitesToRun(skippableSuites, tests, rootDir) + const jestSuitesToRun = getJestSuitesToRun(skippableSuites, tests, rootDir) - isSuitesSkipped = suitesToRun.length !== tests.length - numSkippedSuites = skippedSuites.length + hasUnskippableSuites = jestSuitesToRun.hasUnskippableSuites + hasForcedToRunSuites = jestSuitesToRun.hasForcedToRunSuites - itrSkippedSuitesCh.publish({ skippedSuites, frameworkVersion }) + isSuitesSkipped = jestSuitesToRun.suitesToRun.length !== tests.length + numSkippedSuites = jestSuitesToRun.skippedSuites.length + + itrSkippedSuitesCh.publish({ skippedSuites: jestSuitesToRun.skippedSuites, frameworkVersion }) skippableSuites = [] - return { ...testPaths, tests: suitesToRun } + return { ...testPaths, tests: jestSuitesToRun.suitesToRun } }) return searchSourcePackage diff --git a/packages/datadog-instrumentations/src/mocha.js b/packages/datadog-instrumentations/src/mocha.js index 07892e18676..f87d5ffe8e8 100644 --- a/packages/datadog-instrumentations/src/mocha.js +++ b/packages/datadog-instrumentations/src/mocha.js @@ -1,9 +1,10 @@ const { createCoverageMap } = require('istanbul-lib-coverage') +const { isMarkedAsUnskippable } = require('../../datadog-plugin-jest/src/util') + const { addHook, channel, AsyncResource } = require('./helpers/instrument') const shimmer = require('../../datadog-shimmer') const log = require('../../dd-trace/src/log') - const { getCoveredFilenamesFromCoverage, resetCoverage, @@ -50,6 +51,8 @@ let suitesToSkip = [] let frameworkVersion let isSuitesSkipped = false let skippedSuites = [] +const unskippableSuites = [] +let isForcedToRun = false function getSuitesByTestFile (root) { const suitesByTestFile = {} @@ -104,7 +107,8 @@ function getFilteredSuites (originalSuites) { return originalSuites.reduce((acc, suite) => { const testPath = getTestSuitePath(suite.file, process.cwd()) const shouldSkip = suitesToSkip.includes(testPath) - if (shouldSkip) { + const isUnskippable = unskippableSuites.includes(suite.file) + if (shouldSkip && !isUnskippable) { acc.skippedSuites.add(testPath) } else { acc.suitesToRun.push(suite) @@ -151,7 +155,9 @@ function mochaHook (Runner) { status, isSuitesSkipped, testCodeCoverageLinesTotal, - numSkippedSuites: skippedSuites.length + numSkippedSuites: skippedSuites.length, + hasForcedToRunSuites: isForcedToRun, + hasUnskippableSuites: !!unskippableSuites.length }) })) @@ -172,8 +178,10 @@ function mochaHook (Runner) { if (!asyncResource) { asyncResource = new AsyncResource('bound-anonymous-fn') testFileToSuiteAr.set(suite.file, asyncResource) + const isUnskippable = unskippableSuites.includes(suite.file) + isForcedToRun = isUnskippable && suitesToSkip.includes(getTestSuitePath(suite.file, process.cwd())) asyncResource.runInAsyncScope(() => { - testSuiteStartCh.publish(suite) + testSuiteStartCh.publish({ testSuite: suite.file, isUnskippable, isForcedToRun }) }) } }) @@ -370,6 +378,13 @@ addHook({ const runner = run.apply(this, arguments) + this.files.forEach(path => { + const isUnskippable = isMarkedAsUnskippable({ path }) + if (isUnskippable) { + unskippableSuites.push(path) + } + }) + const onReceivedSkippableSuites = ({ err, skippableSuites }) => { if (err) { 
suitesToSkip = [] diff --git a/packages/datadog-plugin-jest/src/index.js b/packages/datadog-plugin-jest/src/index.js index 3ed42f4e0d6..cc659c888eb 100644 --- a/packages/datadog-plugin-jest/src/index.js +++ b/packages/datadog-plugin-jest/src/index.js @@ -52,7 +52,9 @@ class JestPlugin extends CiPlugin { isSuitesSkippingEnabled, isCodeCoverageEnabled, testCodeCoverageLinesTotal, - numSkippedSuites + numSkippedSuites, + hasUnskippableSuites, + hasForcedToRunSuites }) => { this.testSessionSpan.setTag(TEST_STATUS, status) this.testModuleSpan.setTag(TEST_STATUS, status) @@ -66,7 +68,9 @@ class JestPlugin extends CiPlugin { isCodeCoverageEnabled, testCodeCoverageLinesTotal, skippingType: 'suite', - skippingCount: numSkippedSuites + skippingCount: numSkippedSuites, + hasUnskippableSuites, + hasForcedToRunSuites } ) diff --git a/packages/datadog-plugin-jest/src/util.js b/packages/datadog-plugin-jest/src/util.js index d99d1277031..0fb9d79cb40 100644 --- a/packages/datadog-plugin-jest/src/util.js +++ b/packages/datadog-plugin-jest/src/util.js @@ -85,8 +85,10 @@ function getJestSuitesToRun (skippableSuites, originalTests, rootDir) { acc.suitesToRun.push(test) if (test?.context?.config?.testEnvironmentOptions) { test.context.config.testEnvironmentOptions['_ddUnskippable'] = true + acc.hasUnskippableSuites = true if (shouldBeSkipped) { test.context.config.testEnvironmentOptions['_ddForcedToRun'] = true + acc.hasForcedToRunSuites = true } } return acc @@ -98,7 +100,7 @@ function getJestSuitesToRun (skippableSuites, originalTests, rootDir) { acc.suitesToRun.push(test) } return acc - }, { skippedSuites: [], suitesToRun: [] }) + }, { skippedSuites: [], suitesToRun: [], hasUnskippableSuites: false, hasForcedToRunSuites: false }) } -module.exports = { getFormattedJestTestParameters, getJestTestName, getJestSuitesToRun } +module.exports = { getFormattedJestTestParameters, getJestTestName, getJestSuitesToRun, isMarkedAsUnskippable } diff --git a/packages/datadog-plugin-jest/test/util.spec.js b/packages/datadog-plugin-jest/test/util.spec.js index 9c714ce7aec..6149df4c658 100644 --- a/packages/datadog-plugin-jest/test/util.spec.js +++ b/packages/datadog-plugin-jest/test/util.spec.js @@ -118,4 +118,53 @@ describe('getJestSuitesToRun', () => { 'fixtures/test-to-skip.js' ]) }) + + it('returns hasUnskippableSuites if there is a unskippable suite', () => { + const skippableSuites = [] + const tests = [ + { path: path.join(__dirname, './fixtures/test-to-run.js'), context: { config: { testEnvironmentOptions: {} } } }, + { + path: path.join(__dirname, './fixtures/test-unskippable.js'), + context: { config: { testEnvironmentOptions: {} } } + } + ] + const rootDir = __dirname + + const { hasUnskippableSuites, hasForcedToRunSuites } = getJestSuitesToRun(skippableSuites, tests, rootDir) + expect(hasUnskippableSuites).to.equal(true) + expect(hasForcedToRunSuites).to.equal(false) + }) + + it('returns hasForcedToRunSuites if there is a forced to run suite', () => { + const skippableSuites = ['fixtures/test-unskippable.js'] + const tests = [ + { path: path.join(__dirname, './fixtures/test-to-run.js'), context: { config: { testEnvironmentOptions: {} } } }, + { + path: path.join(__dirname, './fixtures/test-unskippable.js'), + context: { config: { testEnvironmentOptions: {} } } + } + ] + const rootDir = __dirname + + const { hasUnskippableSuites, hasForcedToRunSuites } = getJestSuitesToRun(skippableSuites, tests, rootDir) + expect(hasUnskippableSuites).to.equal(true) + expect(hasForcedToRunSuites).to.equal(true) + }) + + it('adds 
extra `testEnvironmentOptions` if suite is unskippable or forced to run', () => { + const skippableSuites = ['fixtures/test-unskippable.js'] + const testContext = { config: { testEnvironmentOptions: {} } } + const tests = [ + { path: path.join(__dirname, './fixtures/test-to-run.js') }, + { + path: path.join(__dirname, './fixtures/test-unskippable.js'), + context: testContext + } + ] + const rootDir = __dirname + + getJestSuitesToRun(skippableSuites, tests, rootDir) + expect(testContext.config.testEnvironmentOptions['_ddUnskippable']).to.equal(true) + expect(testContext.config.testEnvironmentOptions['_ddForcedToRun']).to.equal(true) + }) }) diff --git a/packages/datadog-plugin-mocha/src/index.js b/packages/datadog-plugin-mocha/src/index.js index 203f32d43fd..3f7009afa39 100644 --- a/packages/datadog-plugin-mocha/src/index.js +++ b/packages/datadog-plugin-mocha/src/index.js @@ -11,7 +11,9 @@ const { getTestParametersString, getTestSuiteCommonTags, addIntelligentTestRunnerSpanTags, - TEST_SOURCE_START + TEST_SOURCE_START, + TEST_ITR_UNSKIPPABLE, + TEST_ITR_FORCED_RUN } = require('../../dd-trace/src/plugins/util/test') const { COMPONENT } = require('../../dd-trace/src/constants') @@ -47,14 +49,21 @@ class MochaPlugin extends CiPlugin { this.tracer._exporter.exportCoverage(formattedCoverage) }) - this.addSub('ci:mocha:test-suite:start', (suite) => { + this.addSub('ci:mocha:test-suite:start', ({ testSuite, isUnskippable, isForcedToRun }) => { const store = storage.getStore() const testSuiteMetadata = getTestSuiteCommonTags( this.command, this.frameworkVersion, - getTestSuitePath(suite.file, this.sourceRoot), + getTestSuitePath(testSuite, this.sourceRoot), 'mocha' ) + if (isUnskippable) { + testSuiteMetadata[TEST_ITR_UNSKIPPABLE] = 'true' + } + if (isForcedToRun) { + testSuiteMetadata[TEST_ITR_FORCED_RUN] = 'true' + } + const testSuiteSpan = this.tracer.startSpan('mocha.test_suite', { childOf: this.testModuleSpan, tags: { @@ -64,7 +73,7 @@ class MochaPlugin extends CiPlugin { } }) this.enter(testSuiteSpan, store) - this._testSuites.set(suite.file, testSuiteSpan) + this._testSuites.set(testSuite, testSuiteSpan) }) this.addSub('ci:mocha:test-suite:finish', (status) => { @@ -139,7 +148,9 @@ class MochaPlugin extends CiPlugin { status, isSuitesSkipped, testCodeCoverageLinesTotal, - numSkippedSuites + numSkippedSuites, + hasForcedToRunSuites, + hasUnskippableSuites }) => { if (this.testSessionSpan) { const { isSuitesSkippingEnabled, isCodeCoverageEnabled } = this.itrConfig || {} @@ -155,7 +166,9 @@ class MochaPlugin extends CiPlugin { isCodeCoverageEnabled, testCodeCoverageLinesTotal, skippingCount: numSkippedSuites, - skippingType: 'suite' + skippingType: 'suite', + hasForcedToRunSuites, + hasUnskippableSuites } ) diff --git a/packages/dd-trace/src/plugins/util/test.js b/packages/dd-trace/src/plugins/util/test.js index 5497bc3e599..976b4043226 100644 --- a/packages/dd-trace/src/plugins/util/test.js +++ b/packages/dd-trace/src/plugins/util/test.js @@ -370,7 +370,9 @@ function addIntelligentTestRunnerSpanTags ( isCodeCoverageEnabled, testCodeCoverageLinesTotal, skippingCount, - skippingType = 'suite' + skippingType = 'suite', + hasUnskippableSuites, + hasForcedToRunSuites } ) { testSessionSpan.setTag(TEST_ITR_TESTS_SKIPPED, isSuitesSkipped ? 'true' : 'false') @@ -385,6 +387,15 @@ function addIntelligentTestRunnerSpanTags ( testModuleSpan.setTag(TEST_ITR_SKIPPING_COUNT, skippingCount) testModuleSpan.setTag(TEST_CODE_COVERAGE_ENABLED, isCodeCoverageEnabled ? 
'true' : 'false') + if (hasUnskippableSuites) { + testSessionSpan.setTag(TEST_ITR_UNSKIPPABLE, 'true') + testModuleSpan.setTag(TEST_ITR_UNSKIPPABLE, 'true') + } + if (hasForcedToRunSuites) { + testSessionSpan.setTag(TEST_ITR_FORCED_RUN, 'true') + testModuleSpan.setTag(TEST_ITR_FORCED_RUN, 'true') + } + // If suites have been skipped we don't want to report the total coverage, as it will be wrong if (testCodeCoverageLinesTotal !== undefined && !isSuitesSkipped) { testSessionSpan.setTag(TEST_CODE_COVERAGE_LINES_PCT, testCodeCoverageLinesTotal) From bef1a4178d2d0c05172dc14a59da434edcc4176a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Mon, 9 Oct 2023 10:46:01 +0200 Subject: [PATCH 014/147] [ci-visibility] Unskippable suites for cucumber (#3681) --- .../ci-visibility/features/greetings.feature | 1 + integration-tests/cucumber/cucumber.spec.js | 132 +++++++++++++++++- .../datadog-instrumentations/src/cucumber.js | 28 +++- packages/datadog-plugin-cucumber/src/index.js | 22 ++- 4 files changed, 173 insertions(+), 10 deletions(-) diff --git a/integration-tests/ci-visibility/features/greetings.feature b/integration-tests/ci-visibility/features/greetings.feature index 0275889e831..1e2adaf50ef 100644 --- a/integration-tests/ci-visibility/features/greetings.feature +++ b/integration-tests/ci-visibility/features/greetings.feature @@ -1,3 +1,4 @@ +@datadog:unskippable Feature: Greetings Scenario: Say greetings When the greeter says greetings diff --git a/integration-tests/cucumber/cucumber.spec.js b/integration-tests/cucumber/cucumber.spec.js index a85b433849b..eae72ed3d7e 100644 --- a/integration-tests/cucumber/cucumber.spec.js +++ b/integration-tests/cucumber/cucumber.spec.js @@ -23,7 +23,9 @@ const { TEST_ITR_TESTS_SKIPPED, TEST_ITR_SKIPPING_TYPE, TEST_ITR_SKIPPING_COUNT, - TEST_CODE_COVERAGE_LINES_PCT + TEST_CODE_COVERAGE_LINES_PCT, + TEST_ITR_FORCED_RUN, + TEST_ITR_UNSKIPPABLE } = require('../../packages/dd-trace/src/plugins/util/test') const hookFile = 'dd-trace/loader-hook.mjs' @@ -547,6 +549,134 @@ versions.forEach(version => { } ) }) + it('does not skip suites if suite is marked as unskippable', (done) => { + receiver.setSettings({ + code_coverage: true, + tests_skipping: true + }) + + receiver.setSuitesToSkip([ + { + type: 'suite', + attributes: { + suite: `${featuresPath}farewell.feature` + } + }, + { + type: 'suite', + attributes: { + suite: `${featuresPath}greetings.feature` + } + } + ]) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const suites = events.filter(event => event.type === 'test_suite_end') + + assert.equal(suites.length, 2) + + const testSession = events.find(event => event.type === 'test_session_end').content + const testModule = events.find(event => event.type === 'test_session_end').content + + assert.propertyVal(testSession.meta, TEST_ITR_UNSKIPPABLE, 'true') + assert.propertyVal(testSession.meta, TEST_ITR_FORCED_RUN, 'true') + assert.propertyVal(testModule.meta, TEST_ITR_UNSKIPPABLE, 'true') + assert.propertyVal(testModule.meta, TEST_ITR_FORCED_RUN, 'true') + + const skippedSuite = suites.find( + event => event.content.resource === 'test_suite.ci-visibility/features/farewell.feature' + ).content + const forcedToRunSuite = suites.find( + event => event.content.resource === 'test_suite.ci-visibility/features/greetings.feature' + ).content + + assert.propertyVal(skippedSuite.meta, 
TEST_STATUS, 'skip') + assert.notProperty(skippedSuite.meta, TEST_ITR_UNSKIPPABLE) + assert.notProperty(skippedSuite.meta, TEST_ITR_FORCED_RUN) + + assert.propertyVal(forcedToRunSuite.meta, TEST_STATUS, 'fail') + assert.propertyVal(forcedToRunSuite.meta, TEST_ITR_UNSKIPPABLE, 'true') + assert.propertyVal(forcedToRunSuite.meta, TEST_ITR_FORCED_RUN, 'true') + }, 25000) + + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: envVars, + stdio: 'inherit' + } + ) + + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) + it('only sets forced to run if suite was going to be skipped by ITR', (done) => { + receiver.setSettings({ + code_coverage: true, + tests_skipping: true + }) + + receiver.setSuitesToSkip([ + { + type: 'suite', + attributes: { + suite: `${featuresPath}farewell.feature` + } + } + ]) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const suites = events.filter(event => event.type === 'test_suite_end') + + assert.equal(suites.length, 2) + + const testSession = events.find(event => event.type === 'test_session_end').content + const testModule = events.find(event => event.type === 'test_session_end').content + + assert.propertyVal(testSession.meta, TEST_ITR_UNSKIPPABLE, 'true') + assert.notProperty(testSession.meta, TEST_ITR_FORCED_RUN) + assert.propertyVal(testModule.meta, TEST_ITR_UNSKIPPABLE, 'true') + assert.notProperty(testModule.meta, TEST_ITR_FORCED_RUN) + + const skippedSuite = suites.find( + event => event.content.resource === 'test_suite.ci-visibility/features/farewell.feature' + ) + const failedSuite = suites.find( + event => event.content.resource === 'test_suite.ci-visibility/features/greetings.feature' + ) + + assert.propertyVal(skippedSuite.content.meta, TEST_STATUS, 'skip') + assert.notProperty(skippedSuite.content.meta, TEST_ITR_UNSKIPPABLE) + assert.notProperty(skippedSuite.content.meta, TEST_ITR_FORCED_RUN) + + assert.propertyVal(failedSuite.content.meta, TEST_STATUS, 'fail') + assert.propertyVal(failedSuite.content.meta, TEST_ITR_UNSKIPPABLE, 'true') + assert.notProperty(failedSuite.content.meta, TEST_ITR_FORCED_RUN) + }, 25000) + + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: envVars, + stdio: 'inherit' + } + ) + + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) it('sets _dd.ci.itr.tests_skipped to false if the received suite is not skipped', (done) => { receiver.setSuitesToSkip([{ type: 'suite', diff --git a/packages/datadog-instrumentations/src/cucumber.js b/packages/datadog-instrumentations/src/cucumber.js index 0bf4dd71e66..fbadf49ee48 100644 --- a/packages/datadog-instrumentations/src/cucumber.js +++ b/packages/datadog-instrumentations/src/cucumber.js @@ -31,6 +31,10 @@ const { getTestSuitePath } = require('../../dd-trace/src/plugins/util/test') +const isMarkedAsUnskippable = (pickle) => { + return !!pickle.tags.find(tag => tag.name === '@datadog:unskippable') +} + // We'll preserve the original coverage here const originalCoverageMap = createCoverageMap() @@ -39,6 +43,9 @@ const patched = new WeakSet() let pickleByFile = {} const pickleResultByFile = {} +let skippableSuites = [] +let isForcedToRun = false +let isUnskippable = false function getSuiteStatusFromTestStatuses (testStatuses) { if (testStatuses.some(status => status === 'fail')) { @@ -91,7 +98,11 @@ function wrapRun 
(pl, isLatestVersion) { const testSuiteFullPath = this.pickle.uri if (!pickleResultByFile[testSuiteFullPath]) { // first test in suite - testSuiteStartCh.publish(testSuiteFullPath) + isUnskippable = isMarkedAsUnskippable(this.pickle) + const testSuitePath = getTestSuitePath(testSuiteFullPath, process.cwd()) + isForcedToRun = isUnskippable && skippableSuites.includes(testSuitePath) + + testSuiteStartCh.publish({ testSuitePath, isUnskippable, isForcedToRun }) } const testSourceLine = this.gherkinDocument && @@ -221,8 +232,11 @@ function getFilteredPickles (runtime, suitesToSkip) { return runtime.pickleIds.reduce((acc, pickleId) => { const test = runtime.eventDataCollector.getPickle(pickleId) const testSuitePath = getTestSuitePath(test.uri, process.cwd()) + + const isUnskippable = isMarkedAsUnskippable(test) const isSkipped = suitesToSkip.includes(testSuitePath) - if (isSkipped) { + + if (isSkipped && !isUnskippable) { acc.skippedSuites.add(testSuitePath) } else { acc.picklesToRun.push(pickleId) @@ -270,7 +284,11 @@ addHook({ skippableSuitesCh.publish({ onDone }) }) - const { err, skippableSuites } = await skippableSuitesPromise + const skippableResponse = await skippableSuitesPromise + + const err = skippableResponse.err + skippableSuites = skippableResponse.skippableSuites + let skippedSuites = [] let isSuitesSkipped = false @@ -315,7 +333,9 @@ addHook({ status: success ? 'pass' : 'fail', isSuitesSkipped, testCodeCoverageLinesTotal, - numSkippedSuites: skippedSuites.length + numSkippedSuites: skippedSuites.length, + hasUnskippableSuites: isUnskippable, + hasForcedToRunSuites: isForcedToRun }) }) return success diff --git a/packages/datadog-plugin-cucumber/src/index.js b/packages/datadog-plugin-cucumber/src/index.js index 75b289d8966..98fa1b4037c 100644 --- a/packages/datadog-plugin-cucumber/src/index.js +++ b/packages/datadog-plugin-cucumber/src/index.js @@ -10,7 +10,9 @@ const { finishAllTraceSpans, getTestSuitePath, getTestSuiteCommonTags, - addIntelligentTestRunnerSpanTags + addIntelligentTestRunnerSpanTags, + TEST_ITR_UNSKIPPABLE, + TEST_ITR_FORCED_RUN } = require('../../dd-trace/src/plugins/util/test') const { RESOURCE_NAME } = require('../../../ext/tags') const { COMPONENT, ERROR_MESSAGE } = require('../../dd-trace/src/constants') @@ -29,7 +31,9 @@ class CucumberPlugin extends CiPlugin { status, isSuitesSkipped, numSkippedSuites, - testCodeCoverageLinesTotal + testCodeCoverageLinesTotal, + hasUnskippableSuites, + hasForcedToRunSuites }) => { const { isSuitesSkippingEnabled, isCodeCoverageEnabled } = this.itrConfig || {} addIntelligentTestRunnerSpanTags( @@ -41,7 +45,9 @@ class CucumberPlugin extends CiPlugin { isCodeCoverageEnabled, testCodeCoverageLinesTotal, skippingCount: numSkippedSuites, - skippingType: 'suite' + skippingType: 'suite', + hasUnskippableSuites, + hasForcedToRunSuites } ) @@ -55,13 +61,19 @@ class CucumberPlugin extends CiPlugin { this.tracer._exporter.flush() }) - this.addSub('ci:cucumber:test-suite:start', (testSuiteFullPath) => { + this.addSub('ci:cucumber:test-suite:start', ({ testSuitePath, isUnskippable, isForcedToRun }) => { const testSuiteMetadata = getTestSuiteCommonTags( this.command, this.frameworkVersion, - getTestSuitePath(testSuiteFullPath, this.sourceRoot), + testSuitePath, 'cucumber' ) + if (isUnskippable) { + testSuiteMetadata[TEST_ITR_UNSKIPPABLE] = 'true' + } + if (isForcedToRun) { + testSuiteMetadata[TEST_ITR_FORCED_RUN] = 'true' + } this.testSuiteSpan = this.tracer.startSpan('cucumber.test_suite', { childOf: this.testModuleSpan, tags: { From 
d705c57b42034fc6ac29de3811fb728d1ec22b6e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Mon, 9 Oct 2023 14:58:37 +0200 Subject: [PATCH 015/147] =?UTF-8?q?[ci-visibility]=C2=A0Unskippable=20test?= =?UTF-8?q?s=20for=20cypress=20(#3684)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- integration-tests/cypress/cypress.spec.js | 145 +++++++++++++++++- integration-tests/cypress/e2e/spec.cy.js | 3 + packages/datadog-plugin-cypress/src/plugin.js | 46 +++++- 3 files changed, 185 insertions(+), 9 deletions(-) diff --git a/integration-tests/cypress/cypress.spec.js b/integration-tests/cypress/cypress.spec.js index 8f616fc0c6a..b781875bb98 100644 --- a/integration-tests/cypress/cypress.spec.js +++ b/integration-tests/cypress/cypress.spec.js @@ -24,7 +24,9 @@ const { TEST_ITR_TESTS_SKIPPED, TEST_SKIPPED_BY_ITR, TEST_ITR_SKIPPING_COUNT, - TEST_ITR_SKIPPING_TYPE + TEST_ITR_SKIPPING_TYPE, + TEST_ITR_UNSKIPPABLE, + TEST_ITR_FORCED_RUN } = require('../../packages/dd-trace/src/plugins/util/test') const { ERROR_MESSAGE } = require('../../packages/dd-trace/src/constants') const semver = require('semver') @@ -544,6 +546,147 @@ moduleType.forEach(({ }).catch(done) }) }) + it('does not skip tests if suite is marked as unskippable', (done) => { + receiver.setSettings({ + code_coverage: true, + tests_skipping: true + }) + + receiver.setSuitesToSkip([ + { + type: 'test', + attributes: { + name: 'context passes', + suite: 'cypress/e2e/other.cy.js' + } + }, + { + type: 'test', + attributes: { + name: 'context passes', + suite: 'cypress/e2e/spec.cy.js' + } + } + ]) + const receiverPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + + const testSession = events.find(event => event.type === 'test_session_end').content + const testModule = events.find(event => event.type === 'test_session_end').content + + assert.propertyVal(testSession.meta, TEST_ITR_UNSKIPPABLE, 'true') + assert.propertyVal(testSession.meta, TEST_ITR_FORCED_RUN, 'true') + assert.propertyVal(testModule.meta, TEST_ITR_UNSKIPPABLE, 'true') + assert.propertyVal(testModule.meta, TEST_ITR_FORCED_RUN, 'true') + + const unskippablePassedTest = events.find(event => + event.content.resource === 'cypress/e2e/spec.cy.js.context passes' + ) + const unskippableFailedTest = events.find(event => + event.content.resource === 'cypress/e2e/spec.cy.js.other context fails' + ) + assert.propertyVal(unskippablePassedTest.content.meta, TEST_STATUS, 'pass') + assert.propertyVal(unskippablePassedTest.content.meta, TEST_ITR_UNSKIPPABLE, 'true') + assert.propertyVal(unskippablePassedTest.content.meta, TEST_ITR_FORCED_RUN, 'true') + + assert.propertyVal(unskippableFailedTest.content.meta, TEST_STATUS, 'fail') + assert.propertyVal(unskippableFailedTest.content.meta, TEST_ITR_UNSKIPPABLE, 'true') + // This was not going to be skipped + assert.notProperty(unskippableFailedTest.content.meta, TEST_ITR_FORCED_RUN) + }, 25000) + + const { + NODE_OPTIONS, + ...restEnvVars + } = getCiVisAgentlessConfig(receiver.port) + + childProcess = exec( + testCommand, + { + cwd, + env: { + ...restEnvVars, + CYPRESS_BASE_URL: `http://localhost:${webAppPort}` + }, + stdio: 'pipe' + } + ) + + childProcess.on('exit', () => { + receiverPromise.then(() => { + done() + }).catch(done) + }) + }) + it('only sets forced to run if test was going to be skipped by ITR', (done) => { + 
receiver.setSettings({ + code_coverage: true, + tests_skipping: true + }) + + receiver.setSuitesToSkip([ + { + type: 'test', + attributes: { + name: 'context passes', + suite: 'cypress/e2e/other.cy.js' + } + } + ]) + + const receiverPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + + const testSession = events.find(event => event.type === 'test_session_end').content + const testModule = events.find(event => event.type === 'test_session_end').content + + assert.propertyVal(testSession.meta, TEST_ITR_UNSKIPPABLE, 'true') + assert.notProperty(testSession.meta, TEST_ITR_FORCED_RUN) + assert.propertyVal(testModule.meta, TEST_ITR_UNSKIPPABLE, 'true') + assert.notProperty(testModule.meta, TEST_ITR_FORCED_RUN) + + const unskippablePassedTest = events.find(event => + event.content.resource === 'cypress/e2e/spec.cy.js.context passes' + ) + const unskippableFailedTest = events.find(event => + event.content.resource === 'cypress/e2e/spec.cy.js.other context fails' + ) + assert.propertyVal(unskippablePassedTest.content.meta, TEST_STATUS, 'pass') + assert.propertyVal(unskippablePassedTest.content.meta, TEST_ITR_UNSKIPPABLE, 'true') + // This was not going to be skipped + assert.notProperty(unskippablePassedTest.content.meta, TEST_ITR_FORCED_RUN) + + assert.propertyVal(unskippableFailedTest.content.meta, TEST_STATUS, 'fail') + assert.propertyVal(unskippableFailedTest.content.meta, TEST_ITR_UNSKIPPABLE, 'true') + // This was not going to be skipped + assert.notProperty(unskippableFailedTest.content.meta, TEST_ITR_FORCED_RUN) + }, 25000) + + const { + NODE_OPTIONS, + ...restEnvVars + } = getCiVisAgentlessConfig(receiver.port) + + childProcess = exec( + testCommand, + { + cwd, + env: { + ...restEnvVars, + CYPRESS_BASE_URL: `http://localhost:${webAppPort}` + }, + stdio: 'pipe' + } + ) + + childProcess.on('exit', () => { + receiverPromise.then(() => { + done() + }).catch(done) + }) + }) it('sets _dd.ci.itr.tests_skipped to false if the received test is not skipped', (done) => { receiver.setSuitesToSkip([{ type: 'test', diff --git a/integration-tests/cypress/e2e/spec.cy.js b/integration-tests/cypress/e2e/spec.cy.js index 30372c3f1d6..0a826924384 100644 --- a/integration-tests/cypress/e2e/spec.cy.js +++ b/integration-tests/cypress/e2e/spec.cy.js @@ -1,3 +1,6 @@ +/** + * @datadog {"unskippable": true} + */ /* eslint-disable */ describe('context', () => { it('passes', () => { diff --git a/packages/datadog-plugin-cypress/src/plugin.js b/packages/datadog-plugin-cypress/src/plugin.js index 5d6ec1ab76c..6b767501362 100644 --- a/packages/datadog-plugin-cypress/src/plugin.js +++ b/packages/datadog-plugin-cypress/src/plugin.js @@ -21,11 +21,14 @@ const { getCoveredFilenamesFromCoverage, getTestSuitePath, addIntelligentTestRunnerSpanTags, - TEST_SKIPPED_BY_ITR + TEST_SKIPPED_BY_ITR, + TEST_ITR_UNSKIPPABLE, + TEST_ITR_FORCED_RUN } = require('../../dd-trace/src/plugins/util/test') const { ORIGIN_KEY, COMPONENT } = require('../../dd-trace/src/constants') const log = require('../../dd-trace/src/log') const NoopTracer = require('../../dd-trace/src/noop/tracer') +const { isMarkedAsUnskippable } = require('../../datadog-plugin-jest/src/util') const TEST_FRAMEWORK_NAME = 'cypress' @@ -185,8 +188,11 @@ module.exports = (on, config) => { let isSuitesSkippingEnabled = false let isCodeCoverageEnabled = false let testsToSkip = [] + const unskippableSuites = [] + let hasForcedToRunSuites = false + let 
hasUnskippableSuites = false - function getTestSpan (testName, testSuite) { + function getTestSpan (testName, testSuite, isUnskippable, isForcedToRun) { const testSuiteTags = { [TEST_COMMAND]: command, [TEST_COMMAND]: command, @@ -212,6 +218,16 @@ module.exports = (on, config) => { testSpanMetadata[TEST_CODE_OWNERS] = codeOwners } + if (isUnskippable) { + hasUnskippableSuites = true + testSpanMetadata[TEST_ITR_UNSKIPPABLE] = 'true' + } + + if (isForcedToRun) { + hasForcedToRunSuites = true + testSpanMetadata[TEST_ITR_FORCED_RUN] = 'true' + } + return tracer.startSpan(`${TEST_FRAMEWORK_NAME}.test`, { childOf, tags: { @@ -233,13 +249,21 @@ module.exports = (on, config) => { isCodeCoverageEnabled = itrConfig.isCodeCoverageEnabled } - getSkippableTests(isSuitesSkippingEnabled, tracer, testConfiguration).then(({ err, skippableTests }) => { + return getSkippableTests(isSuitesSkippingEnabled, tracer, testConfiguration).then(({ err, skippableTests }) => { if (err) { log.error(err) } else { testsToSkip = skippableTests || [] } + // `details.specs` are test files + details.specs.forEach(({ absolute, relative }) => { + const isUnskippableSuite = isMarkedAsUnskippable({ path: absolute }) + if (isUnskippableSuite) { + unskippableSuites.push(relative) + } + }) + const childOf = getTestParentSpan(tracer) rootDir = getRootDir(details) @@ -340,7 +364,9 @@ module.exports = (on, config) => { isSuitesSkippingEnabled, isCodeCoverageEnabled, skippingType: 'test', - skippingCount: skippedTests.length + skippingCount: skippedTests.length, + hasForcedToRunSuites, + hasUnskippableSuites } ) @@ -384,17 +410,21 @@ module.exports = (on, config) => { }, 'dd:beforeEach': (test) => { const { testName, testSuite } = test - // skip test - if (testsToSkip.find(test => { + const shouldSkip = !!testsToSkip.find(test => { return testName === test.name && testSuite === test.suite - })) { + }) + const isUnskippable = unskippableSuites.includes(testSuite) + const isForcedToRun = shouldSkip && isUnskippable + + // skip test + if (shouldSkip && !isUnskippable) { skippedTests.push(test) isTestsSkipped = true return { shouldSkip: true } } if (!activeSpan) { - activeSpan = getTestSpan(testName, testSuite) + activeSpan = getTestSpan(testName, testSuite, isUnskippable, isForcedToRun) } return activeSpan ? 
{ traceId: activeSpan.context().toTraceId() } : {} From 51886ed6e9d9abbd7fc39ec3894da97cba2b1781 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Tue, 10 Oct 2023 15:23:05 +0200 Subject: [PATCH 016/147] [ci-visibility] Add support for AWS Codepipeline (#3692) --- packages/dd-trace/src/plugins/util/ci.js | 17 +++++ .../plugins/util/ci-env/awscodepipeline.json | 62 +++++++++++++++++++ 2 files changed, 79 insertions(+) create mode 100644 packages/dd-trace/test/plugins/util/ci-env/awscodepipeline.json diff --git a/packages/dd-trace/src/plugins/util/ci.js b/packages/dd-trace/src/plugins/util/ci.js index 3c1afb13bb4..8ced8a1d054 100644 --- a/packages/dd-trace/src/plugins/util/ci.js +++ b/packages/dd-trace/src/plugins/util/ci.js @@ -602,6 +602,23 @@ module.exports = { tags[refKey] = ref } + if (env.CODEBUILD_INITIATOR?.startsWith('codepipeline/')) { + const { + CODEBUILD_BUILD_ARN, + DD_ACTION_EXECUTION_ID, + DD_PIPELINE_EXECUTION_ID + } = env + tags = { + [CI_PROVIDER_NAME]: 'awscodepipeline', + [CI_PIPELINE_ID]: DD_PIPELINE_EXECUTION_ID, + [CI_ENV_VARS]: JSON.stringify({ + CODEBUILD_BUILD_ARN, + DD_PIPELINE_EXECUTION_ID, + DD_ACTION_EXECUTION_ID + }) + } + } + normalizeTag(tags, CI_WORKSPACE_PATH, resolveTilde) normalizeTag(tags, GIT_REPOSITORY_URL, filterSensitiveInfoFromRepository) normalizeTag(tags, GIT_BRANCH, normalizeRef) diff --git a/packages/dd-trace/test/plugins/util/ci-env/awscodepipeline.json b/packages/dd-trace/test/plugins/util/ci-env/awscodepipeline.json new file mode 100644 index 00000000000..6f3071331fe --- /dev/null +++ b/packages/dd-trace/test/plugins/util/ci-env/awscodepipeline.json @@ -0,0 +1,62 @@ +[ + [ + { + "CODEBUILD_BUILD_ARN": "arn:aws:codebuild:eu-north-1:12345678:build/codebuild-demo-project:b1e6661e-e4f2-4156-9ab9-82a19", + "CODEBUILD_INITIATOR": "codepipeline/test-pipeline", + "DD_ACTION_EXECUTION_ID": "35519dc3-7c45-493c-9ba6-cd78ea11f69d", + "DD_GIT_BRANCH": "user-supplied-branch", + "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", + "DD_GIT_COMMIT_AUTHOR_EMAIL": "usersupplied-authoremail", + "DD_GIT_COMMIT_AUTHOR_NAME": "usersupplied-authorname", + "DD_GIT_COMMIT_COMMITTER_DATE": "usersupplied-comitterdate", + "DD_GIT_COMMIT_COMMITTER_EMAIL": "usersupplied-comitteremail", + "DD_GIT_COMMIT_COMMITTER_NAME": "usersupplied-comittername", + "DD_GIT_COMMIT_MESSAGE": "usersupplied-message", + "DD_GIT_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "DD_GIT_REPOSITORY_URL": "git@github.com:DataDog/userrepo.git", + "DD_PIPELINE_EXECUTION_ID": "bb1f15ed-fde2-494d-8e13-88785bca9cc0" + }, + { + "_dd.ci.env_vars": "{\"CODEBUILD_BUILD_ARN\":\"arn:aws:codebuild:eu-north-1:12345678:build/codebuild-demo-project:b1e6661e-e4f2-4156-9ab9-82a19\",\"DD_PIPELINE_EXECUTION_ID\":\"bb1f15ed-fde2-494d-8e13-88785bca9cc0\",\"DD_ACTION_EXECUTION_ID\":\"35519dc3-7c45-493c-9ba6-cd78ea11f69d\"}", + "ci.pipeline.id": "bb1f15ed-fde2-494d-8e13-88785bca9cc0", + "ci.provider.name": "awscodepipeline", + "git.branch": "user-supplied-branch", + "git.commit.author.date": "usersupplied-authordate", + "git.commit.author.email": "usersupplied-authoremail", + "git.commit.author.name": "usersupplied-authorname", + "git.commit.committer.date": "usersupplied-comitterdate", + "git.commit.committer.email": "usersupplied-comitteremail", + "git.commit.committer.name": "usersupplied-comittername", + "git.commit.message": "usersupplied-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": 
"git@github.com:DataDog/userrepo.git" + } + ], + [ + { + "CODEBUILD_INITIATOR": "lambdafunction/test-lambda", + "DD_GIT_BRANCH": "user-supplied-branch", + "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", + "DD_GIT_COMMIT_AUTHOR_EMAIL": "usersupplied-authoremail", + "DD_GIT_COMMIT_AUTHOR_NAME": "usersupplied-authorname", + "DD_GIT_COMMIT_COMMITTER_DATE": "usersupplied-comitterdate", + "DD_GIT_COMMIT_COMMITTER_EMAIL": "usersupplied-comitteremail", + "DD_GIT_COMMIT_COMMITTER_NAME": "usersupplied-comittername", + "DD_GIT_COMMIT_MESSAGE": "usersupplied-message", + "DD_GIT_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "DD_GIT_REPOSITORY_URL": "git@github.com:DataDog/userrepo.git" + }, + { + "git.branch": "user-supplied-branch", + "git.commit.author.date": "usersupplied-authordate", + "git.commit.author.email": "usersupplied-authoremail", + "git.commit.author.name": "usersupplied-authorname", + "git.commit.committer.date": "usersupplied-comitterdate", + "git.commit.committer.email": "usersupplied-comitteremail", + "git.commit.committer.name": "usersupplied-comittername", + "git.commit.message": "usersupplied-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "git@github.com:DataDog/userrepo.git" + } + ] +] From f12ece8ff97bb76b777005e257301aa3d8c8a781 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Tue, 10 Oct 2023 15:39:08 +0200 Subject: [PATCH 017/147] [ci-visibility] Fix cucumber integration tests (#3698) --- integration-tests/cucumber/cucumber.spec.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/integration-tests/cucumber/cucumber.spec.js b/integration-tests/cucumber/cucumber.spec.js index eae72ed3d7e..8be97a841fd 100644 --- a/integration-tests/cucumber/cucumber.spec.js +++ b/integration-tests/cucumber/cucumber.spec.js @@ -29,8 +29,8 @@ const { } = require('../../packages/dd-trace/src/plugins/util/test') const hookFile = 'dd-trace/loader-hook.mjs' -const isOldNode = semver.satisfies(process.version, '<=12') -const versions = ['7.0.0', isOldNode ? '8' : 'latest'] +const isOldNode = semver.satisfies(process.version, '<=16') +const versions = ['7.0.0', isOldNode ? 
'9' : 'latest'] const moduleType = [ { From 85ea7beb323960eaaaf0d2a5719e443024672708 Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Tue, 10 Oct 2023 16:24:41 +0200 Subject: [PATCH 018/147] Add an integration test for Code Hotspots and Endpoint Profiling (#3688) --- integration-tests/profiler.spec.js | 107 ++++++++++++++++++++- integration-tests/profiler/codehotspots.js | 44 +++++++++ 2 files changed, 149 insertions(+), 2 deletions(-) create mode 100644 integration-tests/profiler/codehotspots.js diff --git a/integration-tests/profiler.spec.js b/integration-tests/profiler.spec.js index 692f9cae761..48b85028001 100644 --- a/integration-tests/profiler.spec.js +++ b/integration-tests/profiler.spec.js @@ -8,6 +8,10 @@ const childProcess = require('child_process') const { fork } = childProcess const path = require('path') const { assert } = require('chai') +const fs = require('node:fs/promises') +const fsync = require('node:fs') +const zlib = require('node:zlib') +const { Profile } = require('pprof-format') async function checkProfiles (agent, proc, timeout, expectedProfileTypes = ['wall', 'space'], expectBadExit = false, multiplicity = 1) { @@ -20,7 +24,12 @@ async function checkProfiles (agent, proc, timeout, } }, timeout, multiplicity) - await new Promise((resolve, reject) => { + await processExitPromise(proc, timeout, expectBadExit) + return resultPromise +} + +function processExitPromise (proc, timeout, expectBadExit = false) { + return new Promise((resolve, reject) => { const timeoutObj = setTimeout(() => { reject(new Error('Process timed out')) }, timeout) @@ -39,7 +48,6 @@ async function checkProfiles (agent, proc, timeout, .on('error', reject) .on('exit', checkExitCode) }) - return resultPromise } describe('profiler', () => { @@ -65,6 +73,101 @@ describe('profiler', () => { await sandbox.remove() }) + it('code hotspots and endpoint tracing works', async () => { + const procStart = BigInt(Date.now() * 1000000) + const proc = fork(path.join(cwd, 'profiler/codehotspots.js'), { + cwd, + env: { + DD_PROFILING_PROFILERS: 'wall', + DD_PROFILING_EXPORTERS: 'file', + DD_PROFILING_ENABLED: 1, + DD_PROFILING_CODEHOTSPOTS_ENABLED: 1, + DD_PROFILING_ENDPOINT_COLLECTION_ENABLED: 1 + } + }) + + await processExitPromise(proc, 5000) + const procEnd = BigInt(Date.now() * 1000000) + + const dirEntries = await fs.readdir(cwd) + // Get the latest wall_*.pprof file + const pprofEntries = dirEntries.filter(name => /^wall_.+\.pprof$/.test(name)) + assert.isTrue(pprofEntries.length > 0, `No wall_*.pprof file found in ${cwd}`) + const pprofEntry = pprofEntries + .map(name => ({ name, modified: fsync.statSync(path.join(cwd, name), { bigint: true }).mtimeNs })) + .reduce((a, b) => a.modified > b.modified ? a : b) + .name + const pprofGzipped = await fs.readFile(path.join(cwd, pprofEntry)) + const pprofUnzipped = zlib.gunzipSync(pprofGzipped) + const prof = Profile.decode(pprofUnzipped) + + // We check the profile for following invariants: + // - every sample needs to have an 'end_timestamp_ns' label that has values (nanos since UNIX + // epoch) between process start and end. 
+ // - it needs to have samples with 9 total different 'span id's, and 3 different + // 'local root span id's + // - samples with spans also must have a 'trace endpoint' label with values 'endpoint-0', + // 'endpoint-1', or 'endpoint-2' + // - every occurrence of a span must have the same root span and endpoint + const rootSpans = new Set() + const endpoints = new Set() + const spans = new Map() + const strings = prof.stringTable + const tsKey = strings.dedup('end_timestamp_ns') + const spanKey = strings.dedup('span id') + const rootSpanKey = strings.dedup('local root span id') + const endpointKey = strings.dedup('trace endpoint') + for (const sample of prof.sample) { + let ts, spanId, rootSpanId, endpoint + for (const label of sample.label) { + switch (label.key) { + case tsKey: ts = label.num; break + case spanKey: spanId = label.str; break + case rootSpanKey: rootSpanId = label.str; break + case endpointKey: endpoint = label.str; break + default: assert.fail(`Unexpected label key ${strings.dedup(label.key)}`) + } + } + // Timestamp must be defined and be between process start and end time + assert.isDefined(ts) + assert.isTrue(ts <= procEnd) + assert.isTrue(ts >= procStart) + // Either all or none of span-related labels are defined + if (spanId || rootSpanId || endpoint) { + assert.isDefined(spanId) + assert.isDefined(rootSpanId) + assert.isDefined(endpoint) + + rootSpans.add(rootSpanId) + const spanData = { rootSpanId, endpoint } + const existingSpanData = spans.get(spanId) + if (existingSpanData) { + // Span's root span and endpoint must be consistent across samples + assert.deepEqual(spanData, existingSpanData) + } else { + // New span id, store span data + spans.set(spanId, spanData) + // Verify endpoint value + const endpointVal = strings.strings[endpoint] + switch (endpointVal) { + case 'endpoint-0': + case 'endpoint-1': + case 'endpoint-2': + endpoints.add(endpoint) + break + default: + assert.fail(`Unexpected endpoint value ${endpointVal}`) + } + } + } + } + // Need to have a total of 9 different spans, with 3 different root spans + // and 3 different endpoints. 
+ assert.equal(spans.size, 9) + assert.equal(rootSpans.size, 3) + assert.equal(endpoints.size, 3) + }) + context('shutdown', () => { beforeEach(async () => { agent = await new FakeAgent().start() diff --git a/integration-tests/profiler/codehotspots.js b/integration-tests/profiler/codehotspots.js new file mode 100644 index 00000000000..fe40b891363 --- /dev/null +++ b/integration-tests/profiler/codehotspots.js @@ -0,0 +1,44 @@ +'use strict' + +const DDTrace = require('dd-trace') + +const tracer = DDTrace.init() + +function busyLoop () { + const start = process.hrtime.bigint() + for (;;) { + const now = process.hrtime.bigint() + // Busy cycle for 20ms + if (now - start > 20000000n) { + break + } + } +} + +let counter = 0 + +function runBusySpans () { + tracer.trace('x' + counter, (span, done) => { + span.setTag('span.type', 'web') + span.setTag('resource.name', `endpoint-${counter}`) + setImmediate(() => { + for (let i = 0; i < 3; ++i) { + const z = i + tracer.trace('y' + i, (span2, done2) => { + setTimeout(() => { + busyLoop() + done2() + if (z === 2) { + if (++counter < 3) { + setTimeout(runBusySpans, 0) + } + done() + } + }, 0) + }) + } + }) + }) +} + +setTimeout(runBusySpans, 100) From 55221da46688496a28ac2b64807b351753339124 Mon Sep 17 00:00:00 2001 From: Ayan Khan Date: Tue, 10 Oct 2023 14:49:52 -0400 Subject: [PATCH 019/147] fix failing sirun benchmark tests for graphql (#3701) --- benchmark/sirun/plugin-graphql/index.js | 2 +- benchmark/sirun/plugin-graphql/schema.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/benchmark/sirun/plugin-graphql/index.js b/benchmark/sirun/plugin-graphql/index.js index 369abad8a3c..29ff7f64fd8 100644 --- a/benchmark/sirun/plugin-graphql/index.js +++ b/benchmark/sirun/plugin-graphql/index.js @@ -23,7 +23,7 @@ if (Number(process.env.WITH_ASYNC_HOOKS)) { require('async_hooks').createHook(hook).enable() } -const graphql = require('../../../versions/graphql/node_modules/graphql') +const graphql = require(`../../../versions/graphql`).get() const schema = require('./schema') const source = ` diff --git a/benchmark/sirun/plugin-graphql/schema.js b/benchmark/sirun/plugin-graphql/schema.js index 91304f738f6..0c330b5bf71 100644 --- a/benchmark/sirun/plugin-graphql/schema.js +++ b/benchmark/sirun/plugin-graphql/schema.js @@ -1,6 +1,6 @@ 'use strict' -const graphql = require('../../../versions/graphql/node_modules/graphql') +const graphql = require(`../../../versions/graphql`).get() const Human = new graphql.GraphQLObjectType({ name: 'Human', From b62a663602a3cb0144e1dd5c298b4958bfd438d5 Mon Sep 17 00:00:00 2001 From: Igor Unanua Date: Wed, 11 Oct 2023 14:48:33 +0200 Subject: [PATCH 020/147] Enable appsec telemetry before waf init (#3693) * Enable appsec telemetry before waf init * test reportWafInit is called when enabling appsec --- packages/dd-trace/src/appsec/index.js | 4 +- packages/dd-trace/test/appsec/index.spec.js | 60 +++++++++++++++++++++ 2 files changed, 62 insertions(+), 2 deletions(-) diff --git a/packages/dd-trace/src/appsec/index.js b/packages/dd-trace/src/appsec/index.js index 41eba3bdd13..dfd04ae3c0e 100644 --- a/packages/dd-trace/src/appsec/index.js +++ b/packages/dd-trace/src/appsec/index.js @@ -32,6 +32,8 @@ function enable (_config) { if (isEnabled) return try { + appsecTelemetry.enable(_config.telemetry) + setTemplates(_config) RuleManager.applyRules(_config.appsec.rules, _config.appsec) @@ -40,8 +42,6 @@ function enable (_config) { Reporter.setRateLimit(_config.appsec.rateLimit) - appsecTelemetry.enable(_config.telemetry) 
- incomingHttpRequestStart.subscribe(incomingHttpStartTranslator) incomingHttpRequestEnd.subscribe(incomingHttpEndTranslator) bodyParser.subscribe(onRequestBodyParsed) diff --git a/packages/dd-trace/test/appsec/index.spec.js b/packages/dd-trace/test/appsec/index.spec.js index 0a5cc6c4add..04f5c597a51 100644 --- a/packages/dd-trace/test/appsec/index.spec.js +++ b/packages/dd-trace/test/appsec/index.spec.js @@ -21,6 +21,7 @@ const getPort = require('get-port') const blockedTemplate = require('../../src/appsec/blocked_templates') const { storage } = require('../../../datadog-core') const addresses = require('../../src/appsec/addresses') +const telemetryMetrics = require('../../src/telemetry/metrics') describe('AppSec Index', () => { let config @@ -645,6 +646,65 @@ describe('AppSec Index', () => { }) }) }) + + describe('Metrics', () => { + const appsecNamespace = telemetryMetrics.manager.namespace('appsec') + let config + + beforeEach(() => { + sinon.restore() + + appsecNamespace.reset() + + config = new Config({ + appsec: { + enabled: true + } + }) + }) + + afterEach(() => { + appsec.disable() + }) + + after(() => { + appsecNamespace.reset() + }) + + it('should increment waf.init metric', () => { + config.telemetry.enabled = true + config.telemetry.metrics = true + + appsec.enable(config) + + const metrics = appsecNamespace.metrics.toJSON() + + expect(metrics.series.length).to.equal(1) + expect(metrics.series[0].metric).to.equal('waf.init') + }) + + it('should not increment waf.init metric if metrics are not enabled', () => { + config.telemetry.enabled = true + config.telemetry.metrics = false + + appsec.enable(config) + + const metrics = appsecNamespace.metrics.toJSON() + + expect(metrics).to.be.undefined + }) + + it('should not increment waf.init metric if telemetry is not enabled', () => { + config.telemetry.enabled = false + config.telemetry.metrics = true + + appsec.enable(config) + + const metrics = appsecNamespace.metrics.toJSON() + + expect(metrics).to.be.undefined + }) + }) }) describe('IP blocking', () => { From 106bfbb0623789cd13fe8722cc50d9d3be65e7a2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Fri, 13 Oct 2023 11:48:30 +0200 Subject: [PATCH 021/147] [ci-visibility] Fix playwright latest release (#3712) --- packages/datadog-instrumentations/src/playwright.js | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/packages/datadog-instrumentations/src/playwright.js b/packages/datadog-instrumentations/src/playwright.js index 48bcd33fd19..6f8753bb9e3 100644 --- a/packages/datadog-instrumentations/src/playwright.js +++ b/packages/datadog-instrumentations/src/playwright.js @@ -181,6 +181,15 @@ function dispatcherHook (dispatcherExport) { return dispatcherExport } +function getTestByTestId (dispatcher, testId) { + if (dispatcher._testById) { + return dispatcher._testById.get(testId)?.test + } + if (dispatcher._allTests) { + return dispatcher._allTests.find(({ id }) => id === testId) + } +} + function dispatcherHookNew (dispatcherExport, runWrapper) { shimmer.wrap(dispatcherExport.Dispatcher.prototype, 'run', runWrapper) shimmer.wrap(dispatcherExport.Dispatcher.prototype, '_createWorker', createWorker => function () { @@ -188,11 +197,11 @@ function dispatcherHookNew (dispatcherExport, runWrapper) { const worker = createWorker.apply(this, arguments) worker.on('testBegin', ({ testId }) => { - const { test } = dispatcher._testById.get(testId) + const test = getTestByTestId(dispatcher, testId) testBeginHandler(test) }) 
worker.on('testEnd', ({ testId, status, errors }) => { - const { test } = dispatcher._testById.get(testId) + const test = getTestByTestId(dispatcher, testId) testEndHandler(test, STATUS_TO_TEST_STATUS[status], errors && errors[0]) }) From 200e2cb08f4bb45cbda6005bb772e4581a0aaaf1 Mon Sep 17 00:00:00 2001 From: Carles Capell <107924659+CarlesDD@users.noreply.github.com> Date: Mon, 16 Oct 2023 11:32:13 +0200 Subject: [PATCH 022/147] Fix supported MongoDB versions in NoSQL injection test (#3717) * Fix supported MongoDB versions in NoSQL injection test * Simplify version range for mongodb --- .../datadog-instrumentations/src/mongodb.js | 2 +- ...yzer.express-mongo-sanitize.plugin.spec.js | 23 +++++++++++++++---- packages/dd-trace/test/plugins/externals.json | 2 +- 3 files changed, 20 insertions(+), 7 deletions(-) diff --git a/packages/datadog-instrumentations/src/mongodb.js b/packages/datadog-instrumentations/src/mongodb.js index 6dc20b5a8c7..f73aa21652a 100644 --- a/packages/datadog-instrumentations/src/mongodb.js +++ b/packages/datadog-instrumentations/src/mongodb.js @@ -29,7 +29,7 @@ const collectionMethodsWithTwoFilters = [ const startCh = channel('datadog:mongodb:collection:filter:start') -addHook({ name: 'mongodb', versions: ['>=3.3'] }, mongodb => { +addHook({ name: 'mongodb', versions: ['>=3.3 <5', '5', '>=6'] }, mongodb => { [...collectionMethodsWithFilter, ...collectionMethodsWithTwoFilters].forEach(methodName => { if (!(methodName in mongodb.Collection.prototype)) return diff --git a/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.express-mongo-sanitize.plugin.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.express-mongo-sanitize.plugin.spec.js index 3daacbdfe78..7b70c6f8712 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.express-mongo-sanitize.plugin.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.express-mongo-sanitize.plugin.spec.js @@ -1,14 +1,27 @@ 'use strict' -const { prepareTestServerForIastInExpress } = require('../utils') const axios = require('axios') -const agent = require('../../../plugins/agent') -const path = require('path') -const os = require('os') const fs = require('fs') +const os = require('os') +const path = require('path') +const semver = require('semver') +const { prepareTestServerForIastInExpress } = require('../utils') +const agent = require('../../../plugins/agent') + describe('nosql injection detection in mongodb - whole feature', () => { withVersions('express', 'express', '>4.18.0', expressVersion => { withVersions('mongodb', 'mongodb', mongodbVersion => { + const mongodb = require(`../../../../../../versions/mongodb@${mongodbVersion}`) + + const satisfiesNodeVersionForMongo3and4 = + (semver.satisfies(process.version, '<14.20.1') && semver.satisfies(mongodb.version(), '>=3.3 <5')) + const satisfiesNodeVersionForMongo5 = + (semver.satisfies(process.version, '>=14.20.1 <16.20.1') && semver.satisfies(mongodb.version(), '5')) + const satisfiesNodeVersionForMongo6 = + (semver.satisfies(process.version, '>=16.20.1') && semver.satisfies(mongodb.version(), '>=6')) + + if (!satisfiesNodeVersionForMongo3and4 && !satisfiesNodeVersionForMongo5 && !satisfiesNodeVersionForMongo6) return + const vulnerableMethodFilename = 'mongodb-vulnerable-method.js' let collection, tmpFilePath @@ -17,7 +30,7 @@ describe('nosql injection detection in mongodb - whole feature', () => { }) before(async () => { - const { MongoClient } 
= require(`../../../../../../versions/mongodb@${mongodbVersion}`).get() + const { MongoClient } = mongodb.get() const client = new MongoClient('mongodb://127.0.0.1:27017') await client.connect() diff --git a/packages/dd-trace/test/plugins/externals.json b/packages/dd-trace/test/plugins/externals.json index c7707705934..01f65e0551e 100644 --- a/packages/dd-trace/test/plugins/externals.json +++ b/packages/dd-trace/test/plugins/externals.json @@ -44,7 +44,7 @@ "express-mongo-sanitize": [ { "name": "mongodb", - "versions": [">=3.3"] + "versions": [">=3.3 <5", "5", ">=6"] }, { "name": "mongodb-core", From 23fca38bd0362a90f62e91ef15f708d4d9775f59 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Tue, 17 Oct 2023 15:06:35 +0200 Subject: [PATCH 023/147] Fix dev release script (#3697) --- .github/workflows/release-dev.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/release-dev.yml b/.github/workflows/release-dev.yml index 7abf239b538..936c0ee0737 100644 --- a/.github/workflows/release-dev.yml +++ b/.github/workflows/release-dev.yml @@ -11,6 +11,7 @@ jobs: environment: npm permissions: id-token: write + contents: write env: NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} steps: From c631f238b29d349b7509f0b863add57e28c7663f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Tue, 17 Oct 2023 15:33:52 +0200 Subject: [PATCH 024/147] Fix release scripts (#3723) --- .github/workflows/release-3.yml | 1 + .github/workflows/release-latest.yml | 1 + 2 files changed, 2 insertions(+) diff --git a/.github/workflows/release-3.yml b/.github/workflows/release-3.yml index 8061bcf81ce..ec25371051a 100644 --- a/.github/workflows/release-3.yml +++ b/.github/workflows/release-3.yml @@ -15,6 +15,7 @@ jobs: environment: npm permissions: id-token: write + contents: write env: NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} steps: diff --git a/.github/workflows/release-latest.yml b/.github/workflows/release-latest.yml index 613ed456864..a45ed3c87a7 100644 --- a/.github/workflows/release-latest.yml +++ b/.github/workflows/release-latest.yml @@ -15,6 +15,7 @@ jobs: environment: npm permissions: id-token: write + contents: write env: NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} outputs: From 73ad902644bf863ef03de1b3ea056bcd026657aa Mon Sep 17 00:00:00 2001 From: William Conti <58711692+wconti27@users.noreply.github.com> Date: Wed, 18 Oct 2023 06:09:03 -0400 Subject: [PATCH 025/147] report tested integrations and their tested versions (#3669) Add tracking of supported integrations and supported integration versions and output data as artifacts during testing --- .github/actions/testagent/logs/action.yml | 23 ++++++++++++++++++ docker-compose.yml | 2 +- packages/dd-trace/test/plugins/agent.js | 29 +++++++++++++++++++---- packages/dd-trace/test/setup/mocha.js | 9 +++++++ 4 files changed, 58 insertions(+), 5 deletions(-) diff --git a/.github/actions/testagent/logs/action.yml b/.github/actions/testagent/logs/action.yml index 070e35d19f2..bb80d251848 100644 --- a/.github/actions/testagent/logs/action.yml +++ b/.github/actions/testagent/logs/action.yml @@ -15,3 +15,26 @@ runs: docker-compose logs testagent fi shell: bash + - name: Get Tested Integrations from Test Agent + run: | + # make temporary files to save response data to + response=$(mktemp) && headers=$(mktemp) + + # create artifacts directory if it doesn't exist + mkdir -p "./artifacts" + + # get tested integrations + curl -o "$response" -D "$headers" 
http://127.0.0.1:9126/test/integrations/tested_versions + + # get filename representing the name of the tested integration from headers + filename=$(awk -F': ' '/file-name/{print $2}' "$headers" | tr -d '\r\n') + + # copy data to final file and remove temp files + mv "$response" "artifacts/${filename}_supported_versions.csv" + rm "$headers" + shell: bash + - name: Archive Test Agent Artifacts + uses: actions/upload-artifact@v3 + with: + name: supported-integrations + path: ./artifacts diff --git a/docker-compose.yml b/docker-compose.yml index ef86aab2730..ec9a519fde6 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -128,7 +128,7 @@ services: - LDAP_PASSWORDS=password1,password2 testagent: - image: ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:latest + image: ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:v1.13.1 ports: - "127.0.0.1:9126:9126" environment: diff --git a/packages/dd-trace/test/plugins/agent.js b/packages/dd-trace/test/plugins/agent.js index 953312190b4..06c41d77731 100644 --- a/packages/dd-trace/test/plugins/agent.js +++ b/packages/dd-trace/test/plugins/agent.js @@ -16,6 +16,7 @@ let agent = null let listener = null let tracer = null let plugins = [] +const testedPlugins = [] function isMatchingTrace (spans, spanResourceMatch) { if (!spanResourceMatch) { @@ -37,15 +38,33 @@ function ciVisRequestHandler (request, response) { function addEnvironmentVariablesToHeaders (headers) { // get all environment variables that start with "DD_" - const ddEnvVars = Object.entries(process.env) - .filter(([key]) => key.startsWith('DD_')) - .map(([key, value]) => `${key}=${value}`) + const ddEnvVars = new Map( + Object.entries(process.env) + .filter(([key]) => key.startsWith('DD_')) + ) + + // add plugin name and plugin version to headers, this is used for verifying tested + // integration version ranges + const currentPlugin = testedPlugins[testedPlugins.length - 1] + if (currentPlugin && currentPlugin.pluginName && currentPlugin.pluginVersion) { + ddEnvVars.set('DD_INTEGRATION', currentPlugin.pluginName) + ddEnvVars.set('DD_INTEGRATION_VERSION', currentPlugin.pluginVersion) + } // add the DD environment variables to the header if any exist // to send with trace to final agent destination if (ddEnvVars.length > 0) { headers['X-Datadog-Trace-Env-Variables'] = ddEnvVars.join(',') } + + // serialize the DD environment variables into a string of k=v pairs separated by comma + const serializedEnvVars = Array.from(ddEnvVars.entries()) + .map(([key, value]) => `${key}=${value}`) + .join(',') + + // add the serialized DD environment variables to the header + // to send with trace to the final agent destination + headers['X-Datadog-Trace-Env-Variables'] = serializedEnvVars } function handleTraceRequest (req, res, sendToTestAgent) { @@ -333,5 +352,7 @@ module.exports = { .forEach(name => { delete require.cache[name] }) - } + }, + + testedPlugins } diff --git a/packages/dd-trace/test/setup/mocha.js b/packages/dd-trace/test/setup/mocha.js index ba076ebcbaf..840684761a5 100644 --- a/packages/dd-trace/test/setup/mocha.js +++ b/packages/dd-trace/test/setup/mocha.js @@ -18,6 +18,8 @@ global.withExports = withExports global.withNamingSchema = withNamingSchema global.withPeerService = withPeerService +const testedPlugins = agent.testedPlugins + const packageVersionFailures = Object.create({}) function loadInst (plugin) { @@ -216,6 +218,13 @@ function withVersions (plugin, modules, range, cb) { let nodePath before(() => { + // set plugin name and version to later report to test agent 
regarding tested integrations and + // their tested range of versions + const lastPlugin = testedPlugins[testedPlugins.length - 1] + if (!lastPlugin || lastPlugin.pluginName !== plugin || lastPlugin.pluginVersion !== v.version) { + testedPlugins.push({ pluginName: plugin, pluginVersion: v.version }) + } + nodePath = process.env.NODE_PATH process.env.NODE_PATH = [process.env.NODE_PATH, versionPath] .filter(x => x && x !== 'undefined') From af48ea02dccb9fe37029e2067bbfcee701112036 Mon Sep 17 00:00:00 2001 From: Ugaitz Urien Date: Wed, 25 Oct 2023 10:55:19 +0200 Subject: [PATCH 026/147] Support node21 (#3729) --------- Co-authored-by: Stephen Belanger --- .github/actions/node/20/action.yml | 7 + .github/workflows/appsec.yml | 6 + .github/workflows/lambda.yml | 2 + .github/workflows/plugins.yml | 8 + .github/workflows/profiling.yml | 2 + .github/workflows/tracing.yml | 2 + package.json | 4 +- ...sql-injection-analyzer.knex.plugin.spec.js | 2 +- yarn.lock | 177 ++++++++++++++++-- 9 files changed, 196 insertions(+), 14 deletions(-) create mode 100644 .github/actions/node/20/action.yml diff --git a/.github/actions/node/20/action.yml b/.github/actions/node/20/action.yml new file mode 100644 index 00000000000..cf2ff83d3d9 --- /dev/null +++ b/.github/actions/node/20/action.yml @@ -0,0 +1,7 @@ +name: Node 20 +runs: + using: composite + steps: + - uses: actions/setup-node@v3 + with: + node-version: '20' diff --git a/.github/workflows/appsec.yml b/.github/workflows/appsec.yml index cc46e3ff25c..f37acbe97bc 100644 --- a/.github/workflows/appsec.yml +++ b/.github/workflows/appsec.yml @@ -92,6 +92,8 @@ jobs: - run: yarn test:appsec:plugins:ci - uses: ./.github/actions/node/18 - run: yarn test:appsec:plugins:ci + - uses: ./.github/actions/node/20 + - run: yarn test:appsec:plugins:ci - uses: codecov/codecov-action@v2 mysql: @@ -115,6 +117,8 @@ jobs: - run: yarn test:appsec:plugins:ci - uses: ./.github/actions/node/18 - run: yarn test:appsec:plugins:ci + - uses: ./.github/actions/node/20 + - run: yarn test:appsec:plugins:ci - uses: codecov/codecov-action@v2 express: @@ -183,6 +187,8 @@ jobs: - run: yarn test:appsec:plugins:ci - uses: ./.github/actions/node/18 - run: yarn test:appsec:plugins:ci + - uses: ./.github/actions/node/20 + - run: yarn test:appsec:plugins:ci - uses: ./.github/actions/node/latest - run: yarn test:appsec:plugins:ci - uses: codecov/codecov-action@v2 diff --git a/.github/workflows/lambda.yml b/.github/workflows/lambda.yml index 6b550721b54..2600cc157f0 100644 --- a/.github/workflows/lambda.yml +++ b/.github/workflows/lambda.yml @@ -23,6 +23,8 @@ jobs: - run: yarn test:lambda:ci - uses: ./.github/actions/node/18 - run: yarn test:lambda:ci + - uses: ./.github/actions/node/20 + - run: yarn test:lambda:ci - uses: ./.github/actions/node/latest - run: yarn test:lambda:ci - if: always() diff --git a/.github/workflows/plugins.yml b/.github/workflows/plugins.yml index 4f37decd53c..91765d7b52d 100644 --- a/.github/workflows/plugins.yml +++ b/.github/workflows/plugins.yml @@ -266,6 +266,8 @@ jobs: - run: yarn test:plugins:ci - uses: ./.github/actions/node/18 - run: yarn test:plugins:ci + - uses: ./.github/actions/node/20 + - run: yarn test:plugins:ci - uses: ./.github/actions/node/latest - run: yarn test:plugins:ci - if: always() @@ -454,6 +456,8 @@ jobs: - run: yarn test:plugins:ci - uses: ./.github/actions/node/18 - run: yarn test:plugins:ci + - uses: ./.github/actions/node/20 + - run: yarn test:plugins:ci - uses: ./.github/actions/node/latest - run: yarn test:plugins:ci - if: always() @@ -473,6 
+477,8 @@ jobs: - run: yarn test:plugins:ci - uses: ./.github/actions/node/18 - run: yarn test:plugins:ci + - uses: ./.github/actions/node/20 + - run: yarn test:plugins:ci - uses: ./.github/actions/node/latest - run: yarn test:plugins:ci - if: always() @@ -747,6 +753,8 @@ jobs: - run: yarn test:plugins:ci - uses: ./.github/actions/node/18 - run: yarn test:plugins:ci + - uses: ./.github/actions/node/20 + - run: yarn test:plugins:ci - uses: ./.github/actions/node/latest - run: yarn test:plugins:ci - if: always() diff --git a/.github/workflows/profiling.yml b/.github/workflows/profiling.yml index 807cf67069e..05e9696cc48 100644 --- a/.github/workflows/profiling.yml +++ b/.github/workflows/profiling.yml @@ -31,6 +31,8 @@ jobs: - run: yarn test:profiler:ci - uses: ./.github/actions/node/18 - run: yarn test:profiler:ci + - uses: ./.github/actions/node/20 + - run: yarn test:profiler:ci - uses: ./.github/actions/node/latest - run: yarn test:profiler:ci - uses: codecov/codecov-action@v2 diff --git a/.github/workflows/tracing.yml b/.github/workflows/tracing.yml index 46cf886cdff..1b580a24aa3 100644 --- a/.github/workflows/tracing.yml +++ b/.github/workflows/tracing.yml @@ -31,6 +31,8 @@ jobs: - run: yarn test:trace:core:ci - uses: ./.github/actions/node/18 - run: yarn test:trace:core:ci + - uses: ./.github/actions/node/20 + - run: yarn test:trace:core:ci - uses: ./.github/actions/node/latest - run: yarn test:trace:core:ci - uses: codecov/codecov-action@v2 diff --git a/package.json b/package.json index 8612f4eead3..ab10d435c94 100644 --- a/package.json +++ b/package.json @@ -70,9 +70,9 @@ "dependencies": { "@datadog/native-appsec": "^4.0.0", "@datadog/native-iast-rewriter": "2.1.3", - "@datadog/native-iast-taint-tracking": "1.6.1", + "@datadog/native-iast-taint-tracking": "1.6.3", "@datadog/native-metrics": "^2.0.0", - "@datadog/pprof": "4.0.0", + "@datadog/pprof": "4.0.1", "@datadog/sketches-js": "^2.1.0", "@opentelemetry/api": "^1.0.0", "@opentelemetry/core": "^1.14.0", diff --git a/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.knex.plugin.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.knex.plugin.spec.js index a5dddc6b888..e867a03c0f5 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.knex.plugin.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.knex.plugin.spec.js @@ -10,7 +10,7 @@ const iastContextFunctions = require('../../../../src/appsec/iast/iast-context') const { newTaintedString } = require('../../../../src/appsec/iast/taint-tracking/operations') const vulnerabilityReporter = require('../../../../src/appsec/iast/vulnerability-reporter') -describe('sql-injection-analyzer with knex', () => { +describe.skip('sql-injection-analyzer with knex', () => { withVersions('knex', 'knex', knexVersion => { if (!semver.satisfies(knexVersion, '>=2')) return diff --git a/yarn.lock b/yarn.lock index 6279483fb3f..88017d7df6a 100644 --- a/yarn.lock +++ b/yarn.lock @@ -400,10 +400,10 @@ lru-cache "^7.14.0" node-gyp-build "^4.5.0" -"@datadog/native-iast-taint-tracking@1.6.1": - version "1.6.1" - resolved "https://registry.yarnpkg.com/@datadog/native-iast-taint-tracking/-/native-iast-taint-tracking-1.6.1.tgz#fcf2f376797dbfc368d6cb3636b922372d2be50e" - integrity sha512-V1X0UbEROcEkqP4IIovqK9uu8jPXq80m8xOW1Vb6xJ9otO3eBphvDFDSa/OJ4pEYhajjjmGlraLlV6rXjaSGlQ== +"@datadog/native-iast-taint-tracking@1.6.3": + version "1.6.3" + resolved 
"https://registry.yarnpkg.com/@datadog/native-iast-taint-tracking/-/native-iast-taint-tracking-1.6.3.tgz#cb2125f7bf18806da6f326c3a6b7210da3e05d8b" + integrity sha512-u/bBPNx0w8Bq+I+30enI99Ua2WPbVLkANGNyQNjW4tz2PHyeGI++HyzZV+fGm0YSy41FuHZq9EWP3SSDz/eSVw== dependencies: node-gyp-build "^3.9.0" @@ -415,10 +415,10 @@ node-addon-api "^6.1.0" node-gyp-build "^3.9.0" -"@datadog/pprof@3.2.0": - version "3.2.0" - resolved "https://registry.yarnpkg.com/@datadog/pprof/-/pprof-3.2.0.tgz#ab822caf18999a84f144dd4e0261d6e9274f4c5f" - integrity sha512-kOhWHCWB80djnMCr5KNKBAy1Ih/jK/PIj6yqnZwL1Wqni/h6IBPRUMhtIxcYJMRgsZVYrFXUV20AVXTZCzFokw== +"@datadog/pprof@4.0.1": + version "4.0.1" + resolved "https://registry.yarnpkg.com/@datadog/pprof/-/pprof-4.0.1.tgz#f8629ecb62646d90ed49b6252dd0583d8d5001d3" + integrity sha512-TavqyiyQZOaUM9eQB07r8+K/T1CqKyOdsUGxpN79+BF+eOQBpTj/Cte6KdlhcUSKL3h5hSjC+vlgA7uW2qtVhA== dependencies: delay "^5.0.0" node-gyp-build "<4.0" @@ -1077,6 +1077,22 @@ available-typed-arrays@^1.0.5: resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz#92f95616501069d07d10edb2fc37d3e1c65123b7" integrity sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw== +aws-sdk@^2.1446.0: + version "2.1477.0" + resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1477.0.tgz#ec878ea5584fee217eb02ec8f6ebfd9ace47f908" + integrity sha512-DLsrKosrKRe5P1E+BcJAVpOXkma4oUOrcyBUridDmUhdf9k3jj5dnL1roFuDpTmNDDhK8a1tUgY3wmXoKQtv7A== + dependencies: + buffer "4.9.2" + events "1.1.1" + ieee754 "1.1.13" + jmespath "0.16.0" + querystring "0.2.0" + sax "1.2.1" + url "0.10.3" + util "^0.12.4" + uuid "8.0.0" + xml2js "0.5.0" + axios@^0.21.2: version "0.21.4" resolved "https://registry.yarnpkg.com/axios/-/axios-0.21.4.tgz#c67b90dc0568e5c1cf2b0b858c43ba28e2eda575" @@ -1089,7 +1105,7 @@ balanced-match@^1.0.0: resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== -base64-js@^1.2.0: +base64-js@^1.0.2, base64-js@^1.2.0: version "1.5.1" resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== @@ -1183,6 +1199,15 @@ buffer-from@^1.0.0: resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== +buffer@4.9.2: + version "4.9.2" + resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8" + integrity sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg== + dependencies: + base64-js "^1.0.2" + ieee754 "^1.1.4" + isarray "^1.0.0" + builtins@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/builtins/-/builtins-5.0.1.tgz#87f6db9ab0458be728564fa81d876d8d74552fa9" @@ -1220,6 +1245,15 @@ call-bind@^1.0.0, call-bind@^1.0.2: function-bind "^1.1.1" get-intrinsic "^1.0.2" +call-bind@^1.0.4: + version "1.0.5" + resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.5.tgz#6fa2b7845ce0ea49bf4d8b9ef64727a2c2e2e513" + integrity sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ== + dependencies: + function-bind "^1.1.2" + 
get-intrinsic "^1.2.1" + set-function-length "^1.1.1" + caller-callsite@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/caller-callsite/-/caller-callsite-4.1.0.tgz#3e33cb1d910e7b09332d59a3503b9af7462f7295" @@ -1651,6 +1685,15 @@ default-require-extensions@^3.0.0: dependencies: strip-bom "^4.0.0" +define-data-property@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/define-data-property/-/define-data-property-1.1.1.tgz#c35f7cd0ab09883480d12ac5cb213715587800b3" + integrity sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ== + dependencies: + get-intrinsic "^1.2.1" + gopd "^1.0.1" + has-property-descriptors "^1.0.0" + define-properties@^1.1.3, define-properties@^1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.4.tgz#0b14d7bd7fbeb2f3572c3a7eda80ea5d57fb05b1" @@ -2172,6 +2215,11 @@ events-to-array@^1.0.1: resolved "https://registry.yarnpkg.com/events-to-array/-/events-to-array-1.1.2.tgz#2d41f563e1fe400ed4962fe1a4d5c6a7539df7f6" integrity sha512-inRWzRY7nG+aXZxBzEqYKB3HPgwflZRopAjDCHv0whhRx+MTUr1ei0ICZUypdyE0HRm4L2d5VEcIqLD6yl+BFA== +events@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/events/-/events-1.1.1.tgz#9ebdb7635ad099c70dcc4c2a1f5004288e8bd924" + integrity sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw== + express@^4.18.2: version "4.18.2" resolved "https://registry.yarnpkg.com/express/-/express-4.18.2.tgz#3fabe08296e930c796c19e3c516979386ba9fd59" @@ -2389,6 +2437,11 @@ function-bind@^1.1.1: resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== +function-bind@^1.1.2: + version "1.1.2" + resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" + integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== + function-loop@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/function-loop/-/function-loop-2.0.1.tgz#799c56ced01698cf12a1b80e4802e9dafc2ebada" @@ -2687,7 +2740,12 @@ iconv-lite@0.4.24: dependencies: safer-buffer ">= 2.1.2 < 3" -ieee754@^1.1.8: +ieee754@1.1.13: + version "1.1.13" + resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84" + integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg== + +ieee754@^1.1.4, ieee754@^1.1.8: version "1.2.1" resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== @@ -2820,7 +2878,7 @@ ipaddr.js@^2.1.0: resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-2.1.0.tgz#2119bc447ff8c257753b196fc5f1ce08a4cdf39f" integrity sha512-LlbxQ7xKzfBusov6UMi4MFpEg0m+mAm9xyNGEduwXMEDuf4WfzB/RZwMVYEd7IKGvh4IUkEXYxtAVu9T3OelJQ== -is-arguments@^1.1.1: +is-arguments@^1.0.4, is-arguments@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.1.1.tgz#15b3f88fda01f2a97fec84ca761a560f123efa9b" integrity sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA== @@ -2907,6 +2965,13 @@ is-fullwidth-code-point@^3.0.0: resolved 
"https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== +is-generator-function@^1.0.7: + version "1.0.10" + resolved "https://registry.yarnpkg.com/is-generator-function/-/is-generator-function-1.0.10.tgz#f1558baf1ac17e0deea7c0415c438351ff2b3c72" + integrity sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A== + dependencies: + has-tostringtag "^1.0.0" + is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: version "4.0.3" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" @@ -2996,6 +3061,13 @@ is-typed-array@^1.1.10, is-typed-array@^1.1.9: gopd "^1.0.1" has-tostringtag "^1.0.0" +is-typed-array@^1.1.3: + version "1.1.12" + resolved "https://registry.yarnpkg.com/is-typed-array/-/is-typed-array-1.1.12.tgz#d0bab5686ef4a76f7a73097b95470ab199c57d4a" + integrity sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg== + dependencies: + which-typed-array "^1.1.11" + is-typedarray@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" @@ -3125,6 +3197,11 @@ jest-docblock@^29.7.0: dependencies: detect-newline "^3.0.0" +jmespath@0.16.0: + version "0.16.0" + resolved "https://registry.yarnpkg.com/jmespath/-/jmespath-0.16.0.tgz#b15b0a85dfd4d930d43e69ed605943c802785076" + integrity sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw== + js-sdsl@^4.1.4: version "4.1.4" resolved "https://registry.yarnpkg.com/js-sdsl/-/js-sdsl-4.1.4.tgz#78793c90f80e8430b7d8dc94515b6c77d98a26a6" @@ -4000,6 +4077,11 @@ proxyquire@^1.8.0: module-not-found-error "^1.0.0" resolve "~1.1.7" +punycode@1.3.2: + version "1.3.2" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d" + integrity sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw== + punycode@^2.0.0: version "2.3.0" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.0.tgz#f67fa67c94da8f4d0cfff981aee4118064199b8f" @@ -4017,6 +4099,11 @@ qs@6.11.0: dependencies: side-channel "^1.0.4" +querystring@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" + integrity sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g== + queue-microtask@^1.2.2: version "1.2.3" resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" @@ -4295,6 +4382,16 @@ safe-regex-test@^1.0.0: resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== +sax@1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.1.tgz#7b8e656190b228e81a66aea748480d828cd2d37a" + integrity sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA== + +sax@>=0.6.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/sax/-/sax-1.3.0.tgz#a5dbe77db3be05c9d1ee7785dbd3ea9de51593d0" + integrity 
sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA== + scheduler@^0.20.2: version "0.20.2" resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.20.2.tgz#4baee39436e34aa93b4874bddcbf0fe8b8b50e91" @@ -4375,6 +4472,16 @@ set-blocking@^2.0.0: resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" integrity sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw== +set-function-length@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.1.1.tgz#4bc39fafb0307224a33e106a7d35ca1218d659ed" + integrity sha512-VoaqjbBJKiWtg4yRcKBQ7g7wnGnLV3M8oLvVWwOk2PdYY6PEFegR1vezXR0tw6fZGF9csVakIRjrJiy2veSBFQ== + dependencies: + define-data-property "^1.1.1" + get-intrinsic "^1.2.1" + gopd "^1.0.1" + has-property-descriptors "^1.0.0" + setimmediate@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" @@ -4934,11 +5041,30 @@ uri-js@^4.2.2: dependencies: punycode "^2.1.0" +url@0.10.3: + version "0.10.3" + resolved "https://registry.yarnpkg.com/url/-/url-0.10.3.tgz#021e4d9c7705f21bbf37d03ceb58767402774c64" + integrity sha512-hzSUW2q06EqL1gKM/a+obYHLIO6ct2hwPuviqTTOcfFVc61UbfJ2Q32+uGL/HCPxKqrdGB5QUwIe7UqlDgwsOQ== + dependencies: + punycode "1.3.2" + querystring "0.2.0" + util-deprecate@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== +util@^0.12.4: + version "0.12.5" + resolved "https://registry.yarnpkg.com/util/-/util-0.12.5.tgz#5f17a6059b73db61a875668781a1c2b136bd6fbc" + integrity sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA== + dependencies: + inherits "^2.0.3" + is-arguments "^1.0.4" + is-generator-function "^1.0.7" + is-typed-array "^1.1.3" + which-typed-array "^1.1.2" + utils-merge@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" @@ -4949,6 +5075,11 @@ uuid-parse@^1.1.0: resolved "https://registry.yarnpkg.com/uuid-parse/-/uuid-parse-1.1.0.tgz#7061c5a1384ae0e1f943c538094597e1b5f3a65b" integrity sha512-OdmXxA8rDsQ7YpNVbKSJkNzTw2I+S5WsbMDnCtIWSQaosNAcWtFuI/YK1TjzUI6nbkgiqEyh8gWngfcv8Asd9A== +uuid@8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.0.0.tgz#bc6ccf91b5ff0ac07bbcdbf1c7c4e150db4dbb6c" + integrity sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw== + uuid@^8.3.2: version "8.3.2" resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" @@ -4997,6 +5128,17 @@ which-typed-array@^1.1.10, which-typed-array@^1.1.9: has-tostringtag "^1.0.0" is-typed-array "^1.1.10" +which-typed-array@^1.1.11, which-typed-array@^1.1.2: + version "1.1.13" + resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.13.tgz#870cd5be06ddb616f504e7b039c4c24898184d36" + integrity sha512-P5Nra0qjSncduVPEAr7xhoF5guty49ArDTwzJ/yNuPIbZppyRxFQsRCWrocxIY+CnMVG+qfbU2FmDKyvSGClow== + dependencies: + available-typed-arrays "^1.0.5" + call-bind "^1.0.4" + for-each "^0.3.3" + gopd "^1.0.1" + has-tostringtag "^1.0.0" + which@2.0.2, which@^2.0.1, which@^2.0.2: version "2.0.2" 
resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" @@ -5071,6 +5213,19 @@ ws@^7, ws@^7.5.5: resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591" integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q== +xml2js@0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.5.0.tgz#d9440631fbb2ed800203fad106f2724f62c493b7" + integrity sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA== + dependencies: + sax ">=0.6.0" + xmlbuilder "~11.0.0" + +xmlbuilder@~11.0.0: + version "11.0.1" + resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-11.0.1.tgz#be9bae1c8a046e76b31127726347d0ad7002beb3" + integrity sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA== + xtend@^4.0.0: version "4.0.2" resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" From 8160dda530969acb8ee908800848992b90abe9c5 Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Wed, 25 Oct 2023 11:19:30 +0200 Subject: [PATCH 027/147] Add a security review question to the PR template (#3569) --- .github/pull_request_template.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 38c11e11c31..4df80bee84f 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -22,3 +22,11 @@ ### Additional Notes + +### Security +Datadog employees: +- [ ] If this PR touches code that signs or publishes builds or packages, or handles credentials of any kind, I've requested a review from `@DataDog/security-design-and-guidance`. +- [ ] This PR doesn't touch any of that. + +Unsure? Have a question? Request a review! 
+ From c6b03eab64ed62c4785e3a30a9023882de7e3859 Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Wed, 25 Oct 2023 18:27:48 +0200 Subject: [PATCH 028/147] Emit thread names in wall profiles (#3726) --- integration-tests/profiler.spec.js | 7 ++++++- packages/dd-trace/src/profiling/profilers/wall.js | 8 +++++++- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/integration-tests/profiler.spec.js b/integration-tests/profiler.spec.js index 48b85028001..79cd7efe09b 100644 --- a/integration-tests/profiler.spec.js +++ b/integration-tests/profiler.spec.js @@ -117,14 +117,17 @@ describe('profiler', () => { const spanKey = strings.dedup('span id') const rootSpanKey = strings.dedup('local root span id') const endpointKey = strings.dedup('trace endpoint') + const threadNameKey = strings.dedup('thread name') + const threadNameValue = strings.dedup('Main Event Loop') for (const sample of prof.sample) { - let ts, spanId, rootSpanId, endpoint + let ts, spanId, rootSpanId, endpoint, threadName for (const label of sample.label) { switch (label.key) { case tsKey: ts = label.num; break case spanKey: spanId = label.str; break case rootSpanKey: rootSpanId = label.str; break case endpointKey: endpoint = label.str; break + case threadNameKey: threadName = label.str; break default: assert.fail(`Unexpected label key ${strings.dedup(label.key)}`) } } @@ -132,6 +135,8 @@ describe('profiler', () => { assert.isDefined(ts) assert.isTrue(ts <= procEnd) assert.isTrue(ts >= procStart) + // Thread name must be defined and exactly equal "Main Event Loop" + assert.equal(threadName, threadNameValue) // Either all or none of span-related labels are defined if (spanId || rootSpanId || endpoint) { assert.isDefined(spanId) diff --git a/packages/dd-trace/src/profiling/profilers/wall.js b/packages/dd-trace/src/profiling/profilers/wall.js index 57ca7e1b242..57813aed103 100644 --- a/packages/dd-trace/src/profiling/profilers/wall.js +++ b/packages/dd-trace/src/profiling/profilers/wall.js @@ -12,6 +12,12 @@ const beforeCh = dc.channel('dd-trace:storage:before') const enterCh = dc.channel('dd-trace:storage:enter') const profilerTelemetryMetrics = telemetryMetrics.manager.namespace('profilers') +const threadName = (function () { + const { isMainThread, threadId } = require('node:worker_threads') + const name = isMainThread ? 
'Main' : `Worker #${threadId}` + return `${name} Event Loop` +})() + let kSampleCount function getActiveSpan () { @@ -24,7 +30,7 @@ function getStartedSpans (context) { } function generateLabels ({ context: { spanId, rootSpanId, webTags, endpoint }, timestamp }) { - const labels = {} + const labels = { 'thread name': threadName } if (spanId) { labels['span id'] = spanId } From 18d2f961ffe8acb9b402cc8ecc8bf4d89af2569d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Thu, 26 Oct 2023 11:38:21 +0200 Subject: [PATCH 029/147] [ci-visibility] Add custom tags capability to playwright tests (#3741) --- .../playwright-tests/landing-page-test.js | 8 +++++ .../playwright/playwright.spec.js | 7 ++++ .../src/playwright.js | 17 ++++++---- .../datadog-plugin-playwright/src/index.js | 5 ++- packages/dd-trace/src/plugins/util/test.js | 30 ++++++++++++++++- .../dd-trace/test/plugins/util/test.spec.js | 32 ++++++++++++++++++- 6 files changed, 90 insertions(+), 9 deletions(-) diff --git a/integration-tests/ci-visibility/playwright-tests/landing-page-test.js b/integration-tests/ci-visibility/playwright-tests/landing-page-test.js index 0cf4193cf4a..adc5ee01e28 100644 --- a/integration-tests/ci-visibility/playwright-tests/landing-page-test.js +++ b/integration-tests/ci-visibility/playwright-tests/landing-page-test.js @@ -20,4 +20,12 @@ test.describe('playwright', () => { 'Hello Warld' ]) }) + test('should work with annotated tests', async ({ page }) => { + test.info().annotations.push({ type: 'DD_TAGS[test.memory.usage]', description: 'low' }) + // this is malformed and should be ignored + test.info().annotations.push({ type: 'DD_TAGS[test.invalid', description: 'high' }) + await expect(page.locator('.hello-world')).toHaveText([ + 'Hello World' + ]) + }) }) diff --git a/integration-tests/playwright/playwright.spec.js b/integration-tests/playwright/playwright.spec.js index 3ec6a6aacef..49d483ea99e 100644 --- a/integration-tests/playwright/playwright.spec.js +++ b/integration-tests/playwright/playwright.spec.js @@ -86,6 +86,7 @@ versions.forEach((version) => { 'landing-page-test.js.should work with passing tests', 'landing-page-test.js.should work with skipped tests', 'landing-page-test.js.should work with fixme', + 'landing-page-test.js.should work with annotated tests', 'todo-list-page-test.js.should work with failing tests', 'todo-list-page-test.js.should work with fixme root' ]) @@ -104,6 +105,12 @@ versions.forEach((version) => { assert.equal(stepEvent.content.name, 'playwright.step') assert.property(stepEvent.content.meta, 'playwright.step') }) + const annotatedTest = testEvents.find(test => + test.content.resource === 'landing-page-test.js.should work with annotated tests' + ) + + assert.propertyVal(annotatedTest.content.meta, 'test.memory.usage', 'low') + assert.notProperty(annotatedTest.content.meta, 'test.invalid') }).then(() => done()).catch(done) childProcess = exec( diff --git a/packages/datadog-instrumentations/src/playwright.js b/packages/datadog-instrumentations/src/playwright.js index 6f8753bb9e3..a209e228ffb 100644 --- a/packages/datadog-instrumentations/src/playwright.js +++ b/packages/datadog-instrumentations/src/playwright.js @@ -1,5 +1,6 @@ const { addHook, channel, AsyncResource } = require('./helpers/instrument') const shimmer = require('../../datadog-shimmer') +const { parseAnnotations } = require('../../dd-trace/src/plugins/util/test') const testStartCh = channel('ci:playwright:test:start') const testFinishCh = 
channel('ci:playwright:test:finish') @@ -103,7 +104,11 @@ function testBeginHandler (test) { }) } -function testEndHandler (test, testStatus, error) { +function testEndHandler (test, annotations, testStatus, error) { + let annotationTags + if (annotations.length) { + annotationTags = parseAnnotations(annotations) + } const { _requireFile: testSuiteAbsolutePath, results, _type } = test if (_type === 'beforeAll' || _type === 'afterAll') { @@ -113,7 +118,7 @@ function testEndHandler (test, testStatus, error) { const testResult = results[results.length - 1] const testAsyncResource = testToAr.get(test) testAsyncResource.runInAsyncScope(() => { - testFinishCh.publish({ testStatus, steps: testResult.steps, error }) + testFinishCh.publish({ testStatus, steps: testResult.steps, error, extraTags: annotationTags }) }) if (!testSuiteToTestStatuses.has(testSuiteAbsolutePath)) { @@ -172,7 +177,7 @@ function dispatcherHook (dispatcherExport) { const { results } = test const testResult = results[results.length - 1] - testEndHandler(test, STATUS_TO_TEST_STATUS[testResult.status], testResult.error) + testEndHandler(test, params.annotations, STATUS_TO_TEST_STATUS[testResult.status], testResult.error) } }) @@ -200,10 +205,10 @@ function dispatcherHookNew (dispatcherExport, runWrapper) { const test = getTestByTestId(dispatcher, testId) testBeginHandler(test) }) - worker.on('testEnd', ({ testId, status, errors }) => { + worker.on('testEnd', ({ testId, status, errors, annotations }) => { const test = getTestByTestId(dispatcher, testId) - testEndHandler(test, STATUS_TO_TEST_STATUS[status], errors && errors[0]) + testEndHandler(test, annotations, STATUS_TO_TEST_STATUS[status], errors && errors[0]) }) return worker @@ -230,7 +235,7 @@ function runnerHook (runnerExport, playwrightVersion) { // because they were skipped tests.forEach(test => { testBeginHandler(test) - testEndHandler(test, 'skip') + testEndHandler(test, [], 'skip') }) }) diff --git a/packages/datadog-plugin-playwright/src/index.js b/packages/datadog-plugin-playwright/src/index.js index 295687e198d..928477ffc3b 100644 --- a/packages/datadog-plugin-playwright/src/index.js +++ b/packages/datadog-plugin-playwright/src/index.js @@ -72,7 +72,7 @@ class PlaywrightPlugin extends CiPlugin { this.enter(span, store) }) - this.addSub('ci:playwright:test:finish', ({ testStatus, steps, error }) => { + this.addSub('ci:playwright:test:finish', ({ testStatus, steps, error, extraTags }) => { const store = storage.getStore() const span = store && store.span if (!span) return @@ -82,6 +82,9 @@ class PlaywrightPlugin extends CiPlugin { if (error) { span.setTag('error', error) } + if (extraTags) { + span.addTags(extraTags) + } steps.forEach(step => { const stepStartTime = step.startTime.getTime() diff --git a/packages/dd-trace/src/plugins/util/test.js b/packages/dd-trace/src/plugins/util/test.js index 976b4043226..d6d0daf93b4 100644 --- a/packages/dd-trace/src/plugins/util/test.js +++ b/packages/dd-trace/src/plugins/util/test.js @@ -118,7 +118,8 @@ module.exports = { fromCoverageMapToCoverage, getTestLineStart, getCallSites, - removeInvalidMetadata + removeInvalidMetadata, + parseAnnotations } // Returns pkg manager and its version, separated by '-', e.g. npm-8.15.0 or yarn-1.22.19 @@ -492,3 +493,30 @@ function getCallSites () { return v8StackTrace } + +/** + * Gets an object of test tags from an Playwright annotations array. + * @param {Object[]} annotations - Annotations from a Playwright test. + * @param {string} annotations[].type - Type of annotation. 
A string of the shape DD_TAGS[$tag_name]. + * @param {string} annotations[].description - Value of the tag. + */ +function parseAnnotations (annotations) { + return annotations.reduce((tags, annotation) => { + if (!annotation?.type) { + return tags + } + const { type, description } = annotation + if (type.startsWith('DD_TAGS')) { + const regex = /\[(.*?)\]/ + const match = regex.exec(type) + let tagValue = '' + if (match) { + tagValue = match[1] + } + if (tagValue) { + tags[tagValue] = description + } + } + return tags + }, {}) +} diff --git a/packages/dd-trace/test/plugins/util/test.spec.js b/packages/dd-trace/test/plugins/util/test.spec.js index 06e3d29fa55..4a992955397 100644 --- a/packages/dd-trace/test/plugins/util/test.spec.js +++ b/packages/dd-trace/test/plugins/util/test.spec.js @@ -13,7 +13,8 @@ const { getCoveredFilenamesFromCoverage, mergeCoverage, resetCoverage, - removeInvalidMetadata + removeInvalidMetadata, + parseAnnotations } = require('../../../src/plugins/util/test') const { GIT_REPOSITORY_URL, GIT_COMMIT_SHA, CI_PIPELINE_URL } = require('../../../src/plugins/util/tags') @@ -218,3 +219,32 @@ describe('metadata validation', () => { }) }) }) + +describe('parseAnnotations', () => { + it('parses correctly shaped annotations', () => { + const tags = parseAnnotations([ + { + type: 'DD_TAGS[test.requirement]', + description: 'high' + }, + { + type: 'DD_TAGS[test.responsible_team]', + description: 'sales' + } + ]) + expect(tags).to.eql({ + 'test.requirement': 'high', + 'test.responsible_team': 'sales' + }) + }) + it('does not crash with invalid arguments', () => { + const tags = parseAnnotations([ + {}, + 'invalid', + { type: 'DD_TAGS', description: 'yeah' }, + { type: 'DD_TAGS[v', description: 'invalid' }, + { type: 'test.requirement', description: 'sure' } + ]) + expect(tags).to.eql({}) + }) +}) From 5d4076d64aa49df8d8540218eec7a49747228aaa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Thu, 26 Oct 2023 12:35:58 +0200 Subject: [PATCH 030/147] [ci-visibility] Instrument suite parsing errors as failed suites (#3735) --- integration-tests/ci-visibility.spec.js | 33 +++++++++++++++++++ .../test-parsing-error/parsing-error-2.js | 7 ++++ .../test-parsing-error/parsing-error.js | 7 ++++ packages/datadog-instrumentations/src/jest.js | 14 ++++---- packages/datadog-plugin-jest/src/index.js | 6 ++-- 5 files changed, 58 insertions(+), 9 deletions(-) create mode 100644 integration-tests/ci-visibility/test-parsing-error/parsing-error-2.js create mode 100644 integration-tests/ci-visibility/test-parsing-error/parsing-error.js diff --git a/integration-tests/ci-visibility.spec.js b/integration-tests/ci-visibility.spec.js index 4ff2ec1e0ec..b84dea50d54 100644 --- a/integration-tests/ci-visibility.spec.js +++ b/integration-tests/ci-visibility.spec.js @@ -26,6 +26,7 @@ const { TEST_ITR_UNSKIPPABLE, TEST_ITR_FORCED_RUN } = require('../packages/dd-trace/src/plugins/util/test') +const { ERROR_MESSAGE } = require('../packages/dd-trace/src/constants') const hookFile = 'dd-trace/loader-hook.mjs' @@ -413,6 +414,38 @@ testFrameworks.forEach(({ done() }) }) + it('reports parsing errors in the test file', (done) => { + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const suites = events.filter(event => event.type === 'test_suite_end') + assert.equal(suites.length, 2) + + const resourceNames = suites.map(suite => 
suite.content.resource) + + assert.includeMembers(resourceNames, [ + 'test_suite.ci-visibility/test-parsing-error/parsing-error-2.js', + 'test_suite.ci-visibility/test-parsing-error/parsing-error.js' + ]) + suites.forEach(suite => { + assert.equal(suite.content.meta[TEST_STATUS], 'fail') + assert.include(suite.content.meta[ERROR_MESSAGE], 'chao') + }) + }) + childProcess = fork(testFile, { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + TESTS_TO_RUN: 'test-parsing-error/parsing-error' + }, + stdio: 'pipe' + }) + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) } it('can run tests and report spans', (done) => { diff --git a/integration-tests/ci-visibility/test-parsing-error/parsing-error-2.js b/integration-tests/ci-visibility/test-parsing-error/parsing-error-2.js new file mode 100644 index 00000000000..81286c0ee5d --- /dev/null +++ b/integration-tests/ci-visibility/test-parsing-error/parsing-error-2.js @@ -0,0 +1,7 @@ +const { expect } = require('chao') + +describe('test-parsing-error-2', () => { + it('can report tests', () => { + expect(1 + 2).to.equal(3) + }) +}) diff --git a/integration-tests/ci-visibility/test-parsing-error/parsing-error.js b/integration-tests/ci-visibility/test-parsing-error/parsing-error.js new file mode 100644 index 00000000000..e6d9108a9ea --- /dev/null +++ b/integration-tests/ci-visibility/test-parsing-error/parsing-error.js @@ -0,0 +1,7 @@ +const { expect } = require('chao') + +describe('test-parsing-error', () => { + it('can report tests', () => { + expect(1 + 2).to.equal(3) + }) +}) diff --git a/packages/datadog-instrumentations/src/jest.js b/packages/datadog-instrumentations/src/jest.js index c8e5ee60c52..bd82c46484e 100644 --- a/packages/datadog-instrumentations/src/jest.js +++ b/packages/datadog-instrumentations/src/jest.js @@ -363,23 +363,23 @@ function jestAdapterWrapper (jestAdapter, jestVersion) { status = 'fail' } - const coverageFiles = getCoveredFilenamesFromCoverage(environment.global.__coverage__) - .map(filename => getTestSuitePath(filename, environment.rootDir)) - /** * Child processes do not each request ITR configuration, so the jest's parent process * needs to pass them the configuration. This is done via _ddTestCodeCoverageEnabled, which * controls whether coverage is reported. 
- */ - if (coverageFiles && - environment.testEnvironmentOptions && - environment.testEnvironmentOptions._ddTestCodeCoverageEnabled) { + */ + if (environment.testEnvironmentOptions?._ddTestCodeCoverageEnabled) { + const coverageFiles = getCoveredFilenamesFromCoverage(environment.global.__coverage__) + .map(filename => getTestSuitePath(filename, environment.rootDir)) asyncResource.runInAsyncScope(() => { testSuiteCodeCoverageCh.publish([...coverageFiles, environment.testSuite]) }) } testSuiteFinishCh.publish({ status, errorMessage }) return suiteResults + }).catch(error => { + testSuiteFinishCh.publish({ status: 'fail', error }) + throw error }) }) }) diff --git a/packages/datadog-plugin-jest/src/index.js b/packages/datadog-plugin-jest/src/index.js index cc659c888eb..d1990bb8ddb 100644 --- a/packages/datadog-plugin-jest/src/index.js +++ b/packages/datadog-plugin-jest/src/index.js @@ -150,9 +150,11 @@ class JestPlugin extends CiPlugin { }) }) - this.addSub('ci:jest:test-suite:finish', ({ status, errorMessage }) => { + this.addSub('ci:jest:test-suite:finish', ({ status, errorMessage, error }) => { this.testSuiteSpan.setTag(TEST_STATUS, status) - if (errorMessage) { + if (error) { + this.testSuiteSpan.setTag('error', error) + } else if (errorMessage) { this.testSuiteSpan.setTag('error', new Error(errorMessage)) } this.testSuiteSpan.finish() From eac2c1e992854bcba3ece752aed2020aa538d605 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Thu, 26 Oct 2023 12:42:08 +0200 Subject: [PATCH 031/147] [ci-visibility] Better logs for intelligent test runner (#3742) --- packages/datadog-instrumentations/src/cucumber.js | 5 +++++ packages/datadog-instrumentations/src/jest.js | 7 +++++++ packages/datadog-instrumentations/src/mocha.js | 5 +++++ .../intelligent-test-runner/get-itr-configuration.js | 6 ++++-- .../intelligent-test-runner/get-skippable-suites.js | 2 ++ 5 files changed, 23 insertions(+), 2 deletions(-) diff --git a/packages/datadog-instrumentations/src/cucumber.js b/packages/datadog-instrumentations/src/cucumber.js index fbadf49ee48..b8285cfcda6 100644 --- a/packages/datadog-instrumentations/src/cucumber.js +++ b/packages/datadog-instrumentations/src/cucumber.js @@ -296,6 +296,11 @@ addHook({ const filteredPickles = getFilteredPickles(this, skippableSuites) const { picklesToRun } = filteredPickles isSuitesSkipped = picklesToRun.length !== this.pickleIds.length + + log.debug( + () => `${picklesToRun.length} out of ${this.pickleIds.length} suites are going to run.` + ) + this.pickleIds = picklesToRun skippedSuites = Array.from(filteredPickles.skippedSuites) diff --git a/packages/datadog-instrumentations/src/jest.js b/packages/datadog-instrumentations/src/jest.js index bd82c46484e..9d2474f7496 100644 --- a/packages/datadog-instrumentations/src/jest.js +++ b/packages/datadog-instrumentations/src/jest.js @@ -208,6 +208,11 @@ addHook({ const rootDir = test && test.context && test.context.config && test.context.config.rootDir const jestSuitesToRun = getJestSuitesToRun(skippableSuites, shardedTests, rootDir || process.cwd()) + + log.debug( + () => `${jestSuitesToRun.suitesToRun.length} out of ${shardedTests.length} suites are going to run.` + ) + hasUnskippableSuites = jestSuitesToRun.hasUnskippableSuites hasForcedToRunSuites = jestSuitesToRun.hasForcedToRunSuites @@ -508,6 +513,8 @@ addHook({ const jestSuitesToRun = getJestSuitesToRun(skippableSuites, tests, rootDir) + log.debug(() => `${jestSuitesToRun.suitesToRun.length} out of ${tests.length} suites are going 
to run.`) + hasUnskippableSuites = jestSuitesToRun.hasUnskippableSuites hasForcedToRunSuites = jestSuitesToRun.hasForcedToRunSuites diff --git a/packages/datadog-instrumentations/src/mocha.js b/packages/datadog-instrumentations/src/mocha.js index f87d5ffe8e8..53f7547f587 100644 --- a/packages/datadog-instrumentations/src/mocha.js +++ b/packages/datadog-instrumentations/src/mocha.js @@ -396,6 +396,11 @@ addHook({ const { suitesToRun } = filteredSuites isSuitesSkipped = suitesToRun.length !== runner.suite.suites.length + + log.debug( + () => `${suitesToRun.length} out of ${runner.suite.suites.length} suites are going to run.` + ) + runner.suite.suites = suitesToRun skippedSuites = Array.from(filteredSuites.skippedSuites) diff --git a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js index d66f312086d..40c7f1ad8c6 100644 --- a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js +++ b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js @@ -1,5 +1,6 @@ const request = require('../../exporters/common/request') const id = require('../../id') +const log = require('../../log') function getItrConfiguration ({ url, @@ -72,8 +73,9 @@ function getItrConfiguration ({ } } } = JSON.parse(res) - - done(null, { isCodeCoverageEnabled, isSuitesSkippingEnabled }) + const config = { isCodeCoverageEnabled, isSuitesSkippingEnabled } + log.debug(() => `Received settings: ${config}`) + done(null, config) } catch (err) { done(err) } diff --git a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js index f5936267c8b..04448e9a651 100644 --- a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js +++ b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js @@ -1,4 +1,5 @@ const request = require('../../exporters/common/request') +const log = require('../../log') function getSkippableSuites ({ url, @@ -73,6 +74,7 @@ function getSkippableSuites ({ } return { suite, name } }) + log.debug(() => `Number of received skippable ${testLevel}s: ${skippableSuites.length}`) done(null, skippableSuites) } catch (err) { done(err) From d9945646d6ff164c683015f300e55ed6a22a4bdb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Thu, 26 Oct 2023 13:03:24 +0200 Subject: [PATCH 032/147] Remove user credentials from `DD_GIT_REPOSITORY_URL` (#3744) Co-authored-by: Ugaitz Urien --- packages/dd-trace/src/config.js | 10 ++++---- packages/dd-trace/src/git_properties.js | 31 +++++++++++++------------ packages/dd-trace/test/config.spec.js | 5 ++++ 3 files changed, 27 insertions(+), 19 deletions(-) diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index c060ad62278..1cb22a4334f 100644 --- a/packages/dd-trace/src/config.js +++ b/packages/dd-trace/src/config.js @@ -10,7 +10,7 @@ const coalesce = require('koalas') const tagger = require('./tagger') const { isTrue, isFalse } = require('./util') const { GIT_REPOSITORY_URL, GIT_COMMIT_SHA } = require('./plugins/util/tags') -const { getGitMetadataFromGitProperties } = require('./git_properties') +const { getGitMetadataFromGitProperties, removeUserSensitiveInfo } = require('./git_properties') const { updateConfig } = require('./telemetry') const { getIsGCPFunction, 
getIsAzureFunctionConsumptionPlan } = require('./serverless') @@ -638,9 +638,11 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) this.memcachedCommandEnabled = isTrue(DD_TRACE_MEMCACHED_COMMAND_ENABLED) if (this.gitMetadataEnabled) { - this.repositoryUrl = coalesce( - process.env.DD_GIT_REPOSITORY_URL, - this.tags[GIT_REPOSITORY_URL] + this.repositoryUrl = removeUserSensitiveInfo( + coalesce( + process.env.DD_GIT_REPOSITORY_URL, + this.tags[GIT_REPOSITORY_URL] + ) ) this.commitSHA = coalesce( process.env.DD_GIT_COMMIT_SHA, diff --git a/packages/dd-trace/src/git_properties.js b/packages/dd-trace/src/git_properties.js index 83b0a269f61..d8a172e1016 100644 --- a/packages/dd-trace/src/git_properties.js +++ b/packages/dd-trace/src/git_properties.js @@ -1,6 +1,20 @@ const commitSHARegex = /git\.commit\.sha=([a-f\d]{40})/ const repositoryUrlRegex = /git\.repository_url=([\w\d:@/.-]+)/ +function removeUserSensitiveInfo (repositoryUrl) { + try { + // repository URLs can contain username and password, so we want to filter those out + const parsedUrl = new URL(repositoryUrl) + if (parsedUrl.username || parsedUrl.password) { + return `${parsedUrl.origin}${parsedUrl.pathname}` + } + return repositoryUrl + } catch (e) { + // if protocol isn't https, no password will be used + return repositoryUrl + } +} + function getGitMetadataFromGitProperties (gitPropertiesString) { if (!gitPropertiesString) { return {} @@ -9,24 +23,11 @@ function getGitMetadataFromGitProperties (gitPropertiesString) { const repositoryUrlMatch = gitPropertiesString.match(repositoryUrlRegex) const repositoryUrl = repositoryUrlMatch ? repositoryUrlMatch[1] : undefined - let parsedUrl = repositoryUrl - - if (repositoryUrl) { - try { - // repository URLs can contain username and password, so we want to filter those out - parsedUrl = new URL(repositoryUrl) - if (parsedUrl.password) { - parsedUrl = `${parsedUrl.origin}${parsedUrl.pathname}` - } - } catch (e) { - // if protocol isn't https, no password will be used - } - } return { commitSHA: commitSHAMatch ? 
commitSHAMatch[1] : undefined, - repositoryUrl: parsedUrl + repositoryUrl: removeUserSensitiveInfo(repositoryUrl) } } -module.exports = { getGitMetadataFromGitProperties } +module.exports = { getGitMetadataFromGitProperties, removeUserSensitiveInfo } diff --git a/packages/dd-trace/test/config.spec.js b/packages/dd-trace/test/config.spec.js index c099410ce96..271f57c4ce4 100644 --- a/packages/dd-trace/test/config.spec.js +++ b/packages/dd-trace/test/config.spec.js @@ -1266,6 +1266,11 @@ describe('Config', () => { expect(config).to.have.property('commitSHA', DUMMY_COMMIT_SHA) expect(config).to.have.property('repositoryUrl', DUMMY_REPOSITORY_URL) }) + it('reads DD_GIT_* env vars and filters out user data', () => { + process.env.DD_GIT_REPOSITORY_URL = 'https://user:password@github.com/DataDog/dd-trace-js.git' + const config = new Config({}) + expect(config).to.have.property('repositoryUrl', 'https://github.com/DataDog/dd-trace-js.git') + }) it('reads DD_TAGS env var', () => { process.env.DD_TAGS = `git.commit.sha:${DUMMY_COMMIT_SHA},git.repository_url:${DUMMY_REPOSITORY_URL}` process.env.DD_GIT_REPOSITORY_URL = DUMMY_REPOSITORY_URL From 58f6436ae68d6b02d8e3f7bc550625530adb0d8a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Thu, 26 Oct 2023 14:40:08 +0200 Subject: [PATCH 033/147] [ci-visibility] Add test for playwright custom metrics (#3746) --- .../ci-visibility/playwright-tests/landing-page-test.js | 1 + integration-tests/playwright/playwright.spec.js | 1 + 2 files changed, 2 insertions(+) diff --git a/integration-tests/ci-visibility/playwright-tests/landing-page-test.js b/integration-tests/ci-visibility/playwright-tests/landing-page-test.js index adc5ee01e28..4e05a904176 100644 --- a/integration-tests/ci-visibility/playwright-tests/landing-page-test.js +++ b/integration-tests/ci-visibility/playwright-tests/landing-page-test.js @@ -22,6 +22,7 @@ test.describe('playwright', () => { }) test('should work with annotated tests', async ({ page }) => { test.info().annotations.push({ type: 'DD_TAGS[test.memory.usage]', description: 'low' }) + test.info().annotations.push({ type: 'DD_TAGS[test.memory.allocations]', description: 16 }) // this is malformed and should be ignored test.info().annotations.push({ type: 'DD_TAGS[test.invalid', description: 'high' }) await expect(page.locator('.hello-world')).toHaveText([ diff --git a/integration-tests/playwright/playwright.spec.js b/integration-tests/playwright/playwright.spec.js index 49d483ea99e..bb0329086b4 100644 --- a/integration-tests/playwright/playwright.spec.js +++ b/integration-tests/playwright/playwright.spec.js @@ -110,6 +110,7 @@ versions.forEach((version) => { ) assert.propertyVal(annotatedTest.content.meta, 'test.memory.usage', 'low') + assert.propertyVal(annotatedTest.content.metrics, 'test.memory.allocations', 16) assert.notProperty(annotatedTest.content.meta, 'test.invalid') }).then(() => done()).catch(done) From 6c4932cf4945073fb4af2925d06a8368c12ddb3a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Thu, 26 Oct 2023 15:27:01 +0200 Subject: [PATCH 034/147] [ci-visibility] Improve test status in test sessions for jest and mocha (#3736) --- integration-tests/ci-visibility.spec.js | 182 ++++++++++++++++++ .../ci-visibility/test/fail-test.js | 7 + packages/datadog-instrumentations/src/jest.js | 28 ++- .../datadog-instrumentations/src/mocha.js | 12 +- packages/datadog-plugin-jest/src/index.js | 8 +- packages/datadog-plugin-mocha/src/index.js | 8 +- 6 files 
changed, 239 insertions(+), 6 deletions(-) create mode 100644 integration-tests/ci-visibility/test/fail-test.js diff --git a/integration-tests/ci-visibility.spec.js b/integration-tests/ci-visibility.spec.js index b84dea50d54..538713ff842 100644 --- a/integration-tests/ci-visibility.spec.js +++ b/integration-tests/ci-visibility.spec.js @@ -545,6 +545,40 @@ testFrameworks.forEach(({ }) describe('agentless', () => { + it('reports errors in test sessions', (done) => { + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const testSession = events.find(event => event.type === 'test_session_end').content + assert.propertyVal(testSession.meta, TEST_STATUS, 'fail') + const errorMessage = name === 'mocha' ? 'Failed tests: 1' : 'Failed test suites: 1. Failed tests: 1' + assert.include(testSession.meta[ERROR_MESSAGE], errorMessage) + }) + + let TESTS_TO_RUN = 'test/fail-test' + if (name === 'mocha') { + TESTS_TO_RUN = JSON.stringify([ + './test/fail-test.js' + ]) + } + + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + TESTS_TO_RUN + }, + stdio: 'inherit' + } + ) + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) it('does not init if DD_API_KEY is not set', (done) => { receiver.assertMessageReceived(() => { done(new Error('Should not create spans')) @@ -762,6 +796,44 @@ testFrameworks.forEach(({ } ) }) + it('marks the test session as skipped if every suite is skipped', (done) => { + receiver.setSuitesToSkip( + [ + { + type: 'suite', + attributes: { + suite: 'ci-visibility/test/ci-visibility-test.js' + } + }, + { + type: 'suite', + attributes: { + suite: 'ci-visibility/test/ci-visibility-test-2.js' + } + } + ] + ) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const testSession = events.find(event => event.type === 'test_session_end').content + assert.propertyVal(testSession.meta, TEST_STATUS, 'skip') + }) + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: getCiVisAgentlessConfig(receiver.port), + stdio: 'inherit' + } + ) + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) it('does not skip tests if git metadata upload fails', (done) => { receiver.setSuitesToSkip([{ type: 'suite', @@ -1057,6 +1129,40 @@ testFrameworks.forEach(({ }) }) }) + it('reports errors in test sessions', (done) => { + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const testSession = events.find(event => event.type === 'test_session_end').content + assert.propertyVal(testSession.meta, TEST_STATUS, 'fail') + const errorMessage = name === 'mocha' ? 'Failed tests: 1' : 'Failed test suites: 1. 
Failed tests: 1' + assert.include(testSession.meta[ERROR_MESSAGE], errorMessage) + }) + + let TESTS_TO_RUN = 'test/fail-test' + if (name === 'mocha') { + TESTS_TO_RUN = JSON.stringify([ + './test/fail-test.js' + ]) + } + + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: { + ...getCiVisEvpProxyConfig(receiver.port), + TESTS_TO_RUN + }, + stdio: 'inherit' + } + ) + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) it('can report git metadata', (done) => { const infoRequestPromise = receiver.payloadReceived(({ url }) => url === '/info') const searchCommitsRequestPromise = receiver.payloadReceived( @@ -1242,6 +1348,82 @@ testFrameworks.forEach(({ } ) }) + it('marks the test session as skipped if every suite is skipped', (done) => { + receiver.setSuitesToSkip( + [ + { + type: 'suite', + attributes: { + suite: 'ci-visibility/test/ci-visibility-test.js' + } + }, + { + type: 'suite', + attributes: { + suite: 'ci-visibility/test/ci-visibility-test-2.js' + } + } + ] + ) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const testSession = events.find(event => event.type === 'test_session_end').content + assert.propertyVal(testSession.meta, TEST_STATUS, 'skip') + }) + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: getCiVisAgentlessConfig(receiver.port), + stdio: 'inherit' + } + ) + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) + it('marks the test session as skipped if every suite is skipped', (done) => { + receiver.setSuitesToSkip( + [ + { + type: 'suite', + attributes: { + suite: 'ci-visibility/test/ci-visibility-test.js' + } + }, + { + type: 'suite', + attributes: { + suite: 'ci-visibility/test/ci-visibility-test-2.js' + } + } + ] + ) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const testSession = events.find(event => event.type === 'test_session_end').content + assert.propertyVal(testSession.meta, TEST_STATUS, 'skip') + }) + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: getCiVisEvpProxyConfig(receiver.port), + stdio: 'inherit' + } + ) + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) it('does not skip tests if git metadata upload fails', (done) => { receiver.assertPayloadReceived(() => { const error = new Error('should not request skippable') diff --git a/integration-tests/ci-visibility/test/fail-test.js b/integration-tests/ci-visibility/test/fail-test.js new file mode 100644 index 00000000000..efca6e21432 --- /dev/null +++ b/integration-tests/ci-visibility/test/fail-test.js @@ -0,0 +1,7 @@ +const { expect } = require('chai') + +describe('fail', () => { + it('can report failed tests', () => { + expect(1 + 2).to.equal(4) + }) +}) diff --git a/packages/datadog-instrumentations/src/jest.js b/packages/datadog-instrumentations/src/jest.js index 9d2474f7496..233bad2bfc3 100644 --- a/packages/datadog-instrumentations/src/jest.js +++ b/packages/datadog-instrumentations/src/jest.js @@ -277,7 +277,16 @@ function cliWrapper (cli, jestVersion) { const result = await runCLI.apply(this, arguments) - const { results: { success, coverageMap } } = result + const { + results: { + success, + coverageMap, + 
numFailedTestSuites, + numFailedTests, + numTotalTests, + numTotalTestSuites + } + } = result let testCodeCoverageLinesTotal try { @@ -286,17 +295,30 @@ function cliWrapper (cli, jestVersion) { } catch (e) { // ignore errors } + let status, error + + if (success) { + if (numTotalTests === 0 && numTotalTestSuites === 0) { + status = 'skip' + } else { + status = 'pass' + } + } else { + status = 'fail' + error = new Error(`Failed test suites: ${numFailedTestSuites}. Failed tests: ${numFailedTests}`) + } sessionAsyncResource.runInAsyncScope(() => { testSessionFinishCh.publish({ - status: success ? 'pass' : 'fail', + status, isSuitesSkipped, isSuitesSkippingEnabled, isCodeCoverageEnabled, testCodeCoverageLinesTotal, numSkippedSuites, hasUnskippableSuites, - hasForcedToRunSuites + hasForcedToRunSuites, + error }) }) diff --git a/packages/datadog-instrumentations/src/mocha.js b/packages/datadog-instrumentations/src/mocha.js index 53f7547f587..462fb42d20f 100644 --- a/packages/datadog-instrumentations/src/mocha.js +++ b/packages/datadog-instrumentations/src/mocha.js @@ -133,11 +133,20 @@ function mochaHook (Runner) { this.once('end', testRunAsyncResource.bind(function () { let status = 'pass' + let error if (this.stats) { status = this.stats.failures === 0 ? 'pass' : 'fail' + if (this.stats.tests === 0) { + status = 'skip' + } } else if (this.failures !== 0) { status = 'fail' } + + if (status === 'fail') { + error = new Error(`Failed tests: ${this.failures}.`) + } + testFileToSuiteAr.clear() let testCodeCoverageLinesTotal @@ -157,7 +166,8 @@ function mochaHook (Runner) { testCodeCoverageLinesTotal, numSkippedSuites: skippedSuites.length, hasForcedToRunSuites: isForcedToRun, - hasUnskippableSuites: !!unskippableSuites.length + hasUnskippableSuites: !!unskippableSuites.length, + error }) })) diff --git a/packages/datadog-plugin-jest/src/index.js b/packages/datadog-plugin-jest/src/index.js index d1990bb8ddb..3eaceb034aa 100644 --- a/packages/datadog-plugin-jest/src/index.js +++ b/packages/datadog-plugin-jest/src/index.js @@ -54,11 +54,17 @@ class JestPlugin extends CiPlugin { testCodeCoverageLinesTotal, numSkippedSuites, hasUnskippableSuites, - hasForcedToRunSuites + hasForcedToRunSuites, + error }) => { this.testSessionSpan.setTag(TEST_STATUS, status) this.testModuleSpan.setTag(TEST_STATUS, status) + if (error) { + this.testSessionSpan.setTag('error', error) + this.testModuleSpan.setTag('error', error) + } + addIntelligentTestRunnerSpanTags( this.testSessionSpan, this.testModuleSpan, diff --git a/packages/datadog-plugin-mocha/src/index.js b/packages/datadog-plugin-mocha/src/index.js index 3f7009afa39..c8af76247b1 100644 --- a/packages/datadog-plugin-mocha/src/index.js +++ b/packages/datadog-plugin-mocha/src/index.js @@ -150,13 +150,19 @@ class MochaPlugin extends CiPlugin { testCodeCoverageLinesTotal, numSkippedSuites, hasForcedToRunSuites, - hasUnskippableSuites + hasUnskippableSuites, + error }) => { if (this.testSessionSpan) { const { isSuitesSkippingEnabled, isCodeCoverageEnabled } = this.itrConfig || {} this.testSessionSpan.setTag(TEST_STATUS, status) this.testModuleSpan.setTag(TEST_STATUS, status) + if (error) { + this.testSessionSpan.setTag('error', error) + this.testModuleSpan.setTag('error', error) + } + addIntelligentTestRunnerSpanTags( this.testSessionSpan, this.testModuleSpan, From eaf7180badbfbfc7694cae542472041caee01889 Mon Sep 17 00:00:00 2001 From: Sam Brenner <106700075+sabrenner@users.noreply.github.com> Date: Thu, 26 Oct 2023 17:02:51 -0400 Subject: [PATCH 035/147] fix next.js 
build errors by refactoring config (#3748) --- .../datadog-plugin-next/test/index.spec.js | 2 +- .../datadog-plugin-next/test/next.config.js | 44 ++++++++++++++++++- 2 files changed, 43 insertions(+), 3 deletions(-) diff --git a/packages/datadog-plugin-next/test/index.spec.js b/packages/datadog-plugin-next/test/index.spec.js index a9138bb0b37..9d3718b63f7 100644 --- a/packages/datadog-plugin-next/test/index.spec.js +++ b/packages/datadog-plugin-next/test/index.spec.js @@ -101,7 +101,7 @@ describe('Plugin', function () { cwd, env: { ...process.env, - version + VERSION: realVersion }, stdio: ['pipe', 'ignore', 'pipe'] }) diff --git a/packages/datadog-plugin-next/test/next.config.js b/packages/datadog-plugin-next/test/next.config.js index f983146e9de..ef21c698c39 100644 --- a/packages/datadog-plugin-next/test/next.config.js +++ b/packages/datadog-plugin-next/test/next.config.js @@ -1,6 +1,46 @@ -module.exports = { +// Build config dynamically for ease in testing and modification + +const { satisfies } = require('semver') + +const { VERSION } = process.env // Next.js version to dynamically modify parts + +const config = { eslint: { ignoreDuringBuilds: true }, - output: 'standalone' + experimental: {} } + +// In older versions of Next.js (11.0.1 and before), the webpack config doesn't support 'node' prefixes by default +// So, any "node" prefixes are replaced for these older versions by this webpack plugin +// Additionally, webpack was having problems with our use of 'worker_threads', so we don't resolve it +if (satisfies(VERSION, '<11.1.0')) { + config.webpack = (config, { webpack }) => { + config.plugins.push( + new webpack.NormalModuleReplacementPlugin(/^node:/, resource => { + resource.request = resource.request.replace(/^node:/, '') + }) + ) + + config.resolve.preferRelative = true + + config.resolve.fallback = { + ...config.resolve.fallback, + worker_threads: false + } + + return config + } +} + +// standalone only enabled in versions it is present +if (satisfies(VERSION, '>=12.0.0')) { + config.output = 'standalone' +} + +// appDir needs to be enabled as experimental +if (satisfies(VERSION, '>=13.3.0 <13.4.0')) { + config.experimental.appDir = true +} + +module.exports = config From 89a9fc3213c6d02c6c8a95ab6f9af4bfbdf86b1b Mon Sep 17 00:00:00 2001 From: Sam Brenner <106700075+sabrenner@users.noreply.github.com> Date: Fri, 27 Oct 2023 11:45:58 -0400 Subject: [PATCH 036/147] [core] Next.js: Don't Trace Middleware (#3702) Skip middleware tracing --- packages/datadog-instrumentations/src/next.js | 57 +++++++++++-------- packages/datadog-plugin-next/src/index.js | 2 +- .../datadog-plugin-next/test/middleware.js | 7 +++ packages/datadog-plugin-next/test/server.js | 2 +- 4 files changed, 43 insertions(+), 25 deletions(-) create mode 100644 packages/datadog-plugin-next/test/middleware.js diff --git a/packages/datadog-instrumentations/src/next.js b/packages/datadog-instrumentations/src/next.js index 8f9d2d52959..a418477220a 100644 --- a/packages/datadog-instrumentations/src/next.js +++ b/packages/datadog-instrumentations/src/next.js @@ -1,7 +1,5 @@ 'use strict' -// TODO: either instrument all or none of the render functions - const { channel, addHook } = require('./helpers/instrument') const shimmer = require('../../datadog-shimmer') const { DD_MAJOR } = require('../../../version') @@ -15,6 +13,8 @@ const queryParsedChannel = channel('apm:next:query-parsed') const requests = new WeakSet() +const MIDDLEWARE_HEADER = 'x-middleware-invoke' + function wrapHandleRequest (handleRequest) { return 
function (req, res, pathname, query) { return instrument(req, res, () => handleRequest.apply(this, arguments)) @@ -56,18 +56,6 @@ function wrapHandleApiRequestWithMatch (handleApiRequest) { } } -function wrapRenderToResponse (renderToResponse) { - return function (ctx) { - return instrument(ctx.req, ctx.res, () => renderToResponse.apply(this, arguments)) - } -} - -function wrapRenderErrorToResponse (renderErrorToResponse) { - return function (ctx) { - return instrument(ctx.req, ctx.res, () => renderErrorToResponse.apply(this, arguments)) - } -} - function wrapRenderToHTML (renderToHTML) { return function (req, res, pathname, query, parsedUrl) { return instrument(req, res, () => renderToHTML.apply(this, arguments)) @@ -80,6 +68,18 @@ function wrapRenderErrorToHTML (renderErrorToHTML) { } } +function wrapRenderToResponse (renderToResponse) { + return function (ctx) { + return instrument(ctx.req, ctx.res, () => renderToResponse.apply(this, arguments)) + } +} + +function wrapRenderErrorToResponse (renderErrorToResponse) { + return function (ctx) { + return instrument(ctx.req, ctx.res, () => renderErrorToResponse.apply(this, arguments)) + } +} + function wrapFindPageComponents (findPageComponents) { return function (pathname, query) { const result = findPageComponents.apply(this, arguments) @@ -114,7 +114,9 @@ function instrument (req, res, handler) { req = req.originalRequest || req res = res.originalResponse || res - if (requests.has(req)) return handler() + // TODO support middleware properly in the future? + const isMiddleware = req.headers[MIDDLEWARE_HEADER] + if (isMiddleware || requests.has(req)) return handler() requests.add(req) @@ -175,27 +177,32 @@ addHook({ file: 'dist/next-server/server/serve-static.js' }, serveStatic => shimmer.wrap(serveStatic, 'serveStatic', wrapServeStatic)) -addHook({ name: 'next', versions: ['>=13.2'], file: 'dist/server/next-server.js' }, nextServer => { +addHook({ name: 'next', versions: ['>=11.1'], file: 'dist/server/next-server.js' }, nextServer => { const Server = nextServer.default shimmer.wrap(Server.prototype, 'handleRequest', wrapHandleRequest) - shimmer.wrap(Server.prototype, 'handleApiRequest', wrapHandleApiRequestWithMatch) + + // Wrapping these makes sure any public API render methods called in a custom server + // are traced properly + // (instead of wrapping the top-level API methods, just wrapping these covers them all) shimmer.wrap(Server.prototype, 'renderToResponse', wrapRenderToResponse) shimmer.wrap(Server.prototype, 'renderErrorToResponse', wrapRenderErrorToResponse) + shimmer.wrap(Server.prototype, 'findPageComponents', wrapFindPageComponents) return nextServer }) -addHook({ name: 'next', versions: ['>=11.1 <13.2'], file: 'dist/server/next-server.js' }, nextServer => { +// `handleApiRequest` changes parameters/implementation at 13.2.0 +addHook({ name: 'next', versions: ['>=13.2'], file: 'dist/server/next-server.js' }, nextServer => { const Server = nextServer.default + shimmer.wrap(Server.prototype, 'handleApiRequest', wrapHandleApiRequestWithMatch) + return nextServer +}) - shimmer.wrap(Server.prototype, 'handleRequest', wrapHandleRequest) +addHook({ name: 'next', versions: ['>=11.1 <13.2'], file: 'dist/server/next-server.js' }, nextServer => { + const Server = nextServer.default shimmer.wrap(Server.prototype, 'handleApiRequest', wrapHandleApiRequest) - shimmer.wrap(Server.prototype, 'renderToResponse', wrapRenderToResponse) - shimmer.wrap(Server.prototype, 'renderErrorToResponse', wrapRenderErrorToResponse) - 
shimmer.wrap(Server.prototype, 'findPageComponents', wrapFindPageComponents) - return nextServer }) @@ -208,8 +215,12 @@ addHook({ shimmer.wrap(Server.prototype, 'handleRequest', wrapHandleRequest) shimmer.wrap(Server.prototype, 'handleApiRequest', wrapHandleApiRequest) + + // Likewise with newer versions, these correlate to public API render methods for custom servers + // all public ones use these methods somewhere in their code path shimmer.wrap(Server.prototype, 'renderToHTML', wrapRenderToHTML) shimmer.wrap(Server.prototype, 'renderErrorToHTML', wrapRenderErrorToHTML) + shimmer.wrap(Server.prototype, 'findPageComponents', wrapFindPageComponents) return nextServer diff --git a/packages/datadog-plugin-next/src/index.js b/packages/datadog-plugin-next/src/index.js index 6539266399f..b05bda981e0 100644 --- a/packages/datadog-plugin-next/src/index.js +++ b/packages/datadog-plugin-next/src/index.js @@ -64,7 +64,7 @@ class NextPlugin extends ServerPlugin { span.finish() } - pageLoad ({ page, isAppPath }) { + pageLoad ({ page, isAppPath = false }) { const store = storage.getStore() if (!store) return diff --git a/packages/datadog-plugin-next/test/middleware.js b/packages/datadog-plugin-next/test/middleware.js new file mode 100644 index 00000000000..463ea78e0cf --- /dev/null +++ b/packages/datadog-plugin-next/test/middleware.js @@ -0,0 +1,7 @@ +import { NextResponse } from 'next/server' + +export default function middleware () { + // the existence of this file will test that having middleware + // doesn't break instrumentation in tests + return NextResponse.next() +} diff --git a/packages/datadog-plugin-next/test/server.js b/packages/datadog-plugin-next/test/server.js index 673974ac988..e77c478b8a6 100644 --- a/packages/datadog-plugin-next/test/server.js +++ b/packages/datadog-plugin-next/test/server.js @@ -6,7 +6,7 @@ const { createServer } = require('http') const { parse } = require('url') const next = require('next') // eslint-disable-line import/no-extraneous-dependencies -const app = next({ dir: __dirname, dev: false, quiet: true, hostname: HOSTNAME }) +const app = next({ dir: __dirname, dev: false, quiet: true, hostname: HOSTNAME, port: PORT }) const handle = app.getRequestHandler() app.prepare().then(() => { From 10d05a7d656d4241e1b8525107d26d80e7fe607d Mon Sep 17 00:00:00 2001 From: Igor Unanua Date: Mon, 30 Oct 2023 09:14:20 +0100 Subject: [PATCH 037/147] Make telemetry metrics true by default (#3747) --- .../src/appsec/iast/telemetry/index.js | 10 +--------- packages/dd-trace/src/config.js | 2 +- .../taint-tracking-operations.spec.js | 7 ++----- .../test/appsec/iast/telemetry/index.spec.js | 20 ++++--------------- packages/dd-trace/test/config.spec.js | 8 ++++---- 5 files changed, 12 insertions(+), 35 deletions(-) diff --git a/packages/dd-trace/src/appsec/iast/telemetry/index.js b/packages/dd-trace/src/appsec/iast/telemetry/index.js index e5ddf04f6c1..1c5da375329 100644 --- a/packages/dd-trace/src/appsec/iast/telemetry/index.js +++ b/packages/dd-trace/src/appsec/iast/telemetry/index.js @@ -5,17 +5,9 @@ const telemetryLogs = require('./log') const { Verbosity, getVerbosity } = require('./verbosity') const { initRequestNamespace, finalizeRequestNamespace, globalNamespace } = require('./namespaces') -function isIastMetricsEnabled (metrics) { - // TODO: let DD_TELEMETRY_METRICS_ENABLED as undefined in config.js to avoid read here the env property - return process.env.DD_TELEMETRY_METRICS_ENABLED !== undefined ? 
metrics : true -} - class Telemetry { configure (config, verbosity) { - const telemetryAndMetricsEnabled = config && - config.telemetry && - config.telemetry.enabled && - isIastMetricsEnabled(config.telemetry.metrics) + const telemetryAndMetricsEnabled = config?.telemetry?.enabled && config.telemetry.metrics this.verbosity = telemetryAndMetricsEnabled ? getVerbosity(verbosity) : Verbosity.OFF this.enabled = this.verbosity !== Verbosity.OFF diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index 1cb22a4334f..8d44e59f91b 100644 --- a/packages/dd-trace/src/config.js +++ b/packages/dd-trace/src/config.js @@ -251,7 +251,7 @@ class Config { ) const DD_TELEMETRY_METRICS_ENABLED = coalesce( process.env.DD_TELEMETRY_METRICS_ENABLED, - false + true ) const DD_TRACE_AGENT_PROTOCOL_VERSION = coalesce( options.protocolVersion, diff --git a/packages/dd-trace/test/appsec/iast/taint-tracking/taint-tracking-operations.spec.js b/packages/dd-trace/test/appsec/iast/taint-tracking/taint-tracking-operations.spec.js index d9012b47646..0523cad1374 100644 --- a/packages/dd-trace/test/appsec/iast/taint-tracking/taint-tracking-operations.spec.js +++ b/packages/dd-trace/test/appsec/iast/taint-tracking/taint-tracking-operations.spec.js @@ -308,11 +308,8 @@ describe('IAST TaintTracking Operations', () => { [taintTrackingOperations.IAST_TRANSACTION_ID]: 'id' } iastTelemetry.configure({ - telemetry: { enabled: true, metrics: true }, - iast: { - telemetryVerbosity: 'INFORMATION' - } - }) + telemetry: { enabled: true, metrics: true } + }, 'INFORMATION') const requestTaintedAdd = sinon.stub(REQUEST_TAINTED, 'add') diff --git a/packages/dd-trace/test/appsec/iast/telemetry/index.spec.js b/packages/dd-trace/test/appsec/iast/telemetry/index.spec.js index 8fcb9752f3d..78a835b7cde 100644 --- a/packages/dd-trace/test/appsec/iast/telemetry/index.spec.js +++ b/packages/dd-trace/test/appsec/iast/telemetry/index.spec.js @@ -66,7 +66,7 @@ describe('Telemetry', () => { expect(telemetryLogs.start).to.be.calledOnce }) - it('should NOT enable telemetry if verbosity is OFF', () => { + it('should not enable telemetry if verbosity is OFF', () => { const iastTelemetry = proxyquire('../../../../src/appsec/iast/telemetry', { './log': telemetryLogs, '../../../telemetry/metrics': telemetryMetrics @@ -83,12 +83,8 @@ describe('Telemetry', () => { expect(telemetryLogs.start).to.be.calledOnce }) - it('should enable telemetry if metrics not enabled but DD_TELEMETRY_METRICS_ENABLED is undefined', () => { - const origTelemetryMetricsEnabled = process.env.DD_TELEMETRY_METRICS_ENABLED - - delete process.env.DD_TELEMETRY_METRICS_ENABLED - - const telemetryConfig = { enabled: true, metrics: false } + it('should enable telemetry if telemetry.metrics is true', () => { + const telemetryConfig = { enabled: true, metrics: true } iastTelemetry.configure({ telemetry: telemetryConfig }) @@ -97,15 +93,9 @@ describe('Telemetry', () => { expect(iastTelemetry.verbosity).to.be.equal(Verbosity.INFORMATION) expect(telemetryMetrics.manager.set).to.be.calledOnce expect(telemetryLogs.start).to.be.calledOnce - - process.env.DD_TELEMETRY_METRICS_ENABLED = origTelemetryMetricsEnabled }) - it('should not enable telemetry if metrics not enabled but DD_TELEMETRY_METRICS_ENABLED is defined', () => { - const origTelemetryMetricsEnabled = process.env.DD_TELEMETRY_METRICS_ENABLED - - process.env.DD_TELEMETRY_METRICS_ENABLED = 'false' - + it('should not enable telemetry if telemetry.metrics is false', () => { const telemetryConfig = { enabled: true, metrics: 
false } iastTelemetry.configure({ telemetry: telemetryConfig @@ -115,8 +105,6 @@ describe('Telemetry', () => { expect(iastTelemetry.verbosity).to.be.equal(Verbosity.OFF) expect(telemetryMetrics.manager.set).to.not.be.called expect(telemetryLogs.start).to.be.calledOnce - - process.env.DD_TELEMETRY_METRICS_ENABLED = origTelemetryMetricsEnabled }) }) diff --git a/packages/dd-trace/test/config.spec.js b/packages/dd-trace/test/config.spec.js index 271f57c4ce4..df5759b87ee 100644 --- a/packages/dd-trace/test/config.spec.js +++ b/packages/dd-trace/test/config.spec.js @@ -935,7 +935,7 @@ describe('Config', () => { expect(config.telemetry.heartbeatInterval).to.eq(60000) expect(config.telemetry.logCollection).to.be.false expect(config.telemetry.debug).to.be.false - expect(config.telemetry.metrics).to.be.false + expect(config.telemetry.metrics).to.be.true }) it('should set DD_TELEMETRY_HEARTBEAT_INTERVAL', () => { @@ -960,13 +960,13 @@ describe('Config', () => { process.env.DD_TRACE_TELEMETRY_ENABLED = origTraceTelemetryValue }) - it('should set DD_TELEMETRY_METRICS_ENABLED', () => { + it('should not set DD_TELEMETRY_METRICS_ENABLED', () => { const origTelemetryMetricsEnabledValue = process.env.DD_TELEMETRY_METRICS_ENABLED - process.env.DD_TELEMETRY_METRICS_ENABLED = 'true' + process.env.DD_TELEMETRY_METRICS_ENABLED = 'false' const config = new Config() - expect(config.telemetry.metrics).to.be.true + expect(config.telemetry.metrics).to.be.false process.env.DD_TELEMETRY_METRICS_ENABLED = origTelemetryMetricsEnabledValue }) From eaa9d5227c809045e9f72e73dbd56dea528b5297 Mon Sep 17 00:00:00 2001 From: Igor Unanua Date: Mon, 30 Oct 2023 10:43:14 +0100 Subject: [PATCH 038/147] Hardcoded secret detection [APPSEC-11805] (#3687) * Add hardcoded secret analyzer. * Add secret samples * Include secret column * Use the new rewriter literals API * Upgrade rewriter version Co-authored-by: Ugaitz Urien --------- Co-authored-by: Julio Gonzalez Co-authored-by: Ugaitz Urien --- package.json | 2 +- .../src/appsec/iast/analyzers/analyzers.js | 1 + .../analyzers/hardcoded-secret-analyzer.js | 60 ++++ .../iast/analyzers/hardcoded-secrets-rules.js | 269 ++++++++++++++++++ .../dd-trace/src/appsec/iast/path-line.js | 7 +- .../appsec/iast/taint-tracking/rewriter.js | 15 +- .../src/appsec/iast/vulnerabilities.js | 1 + .../hardcoded-secret-analyzer.spec.js | 114 ++++++++ .../resources/hardcoded-secret-functions.js | 7 + .../resources/hardcoded-secrets-suite.json | 269 ++++++++++++++++++ yarn.lock | 8 +- 11 files changed, 745 insertions(+), 8 deletions(-) create mode 100644 packages/dd-trace/src/appsec/iast/analyzers/hardcoded-secret-analyzer.js create mode 100644 packages/dd-trace/src/appsec/iast/analyzers/hardcoded-secrets-rules.js create mode 100644 packages/dd-trace/test/appsec/iast/analyzers/hardcoded-secret-analyzer.spec.js create mode 100644 packages/dd-trace/test/appsec/iast/analyzers/resources/hardcoded-secret-functions.js create mode 100644 packages/dd-trace/test/appsec/iast/analyzers/resources/hardcoded-secrets-suite.json diff --git a/package.json b/package.json index ab10d435c94..83c6ced1542 100644 --- a/package.json +++ b/package.json @@ -69,7 +69,7 @@ }, "dependencies": { "@datadog/native-appsec": "^4.0.0", - "@datadog/native-iast-rewriter": "2.1.3", + "@datadog/native-iast-rewriter": "2.2.0", "@datadog/native-iast-taint-tracking": "1.6.3", "@datadog/native-metrics": "^2.0.0", "@datadog/pprof": "4.0.1", diff --git a/packages/dd-trace/src/appsec/iast/analyzers/analyzers.js 
b/packages/dd-trace/src/appsec/iast/analyzers/analyzers.js index d76b8bd1e7f..62933baa24d 100644 --- a/packages/dd-trace/src/appsec/iast/analyzers/analyzers.js +++ b/packages/dd-trace/src/appsec/iast/analyzers/analyzers.js @@ -2,6 +2,7 @@ module.exports = { 'COMMAND_INJECTION_ANALYZER': require('./command-injection-analyzer'), + 'HARCODED_SECRET_ANALYZER': require('./hardcoded-secret-analyzer'), 'HSTS_HEADER_MISSING_ANALYZER': require('./hsts-header-missing-analyzer'), 'INSECURE_COOKIE_ANALYZER': require('./insecure-cookie-analyzer'), 'LDAP_ANALYZER': require('./ldap-injection-analyzer'), diff --git a/packages/dd-trace/src/appsec/iast/analyzers/hardcoded-secret-analyzer.js b/packages/dd-trace/src/appsec/iast/analyzers/hardcoded-secret-analyzer.js new file mode 100644 index 00000000000..6b1a6172e1f --- /dev/null +++ b/packages/dd-trace/src/appsec/iast/analyzers/hardcoded-secret-analyzer.js @@ -0,0 +1,60 @@ +'use strict' + +const Analyzer = require('./vulnerability-analyzer') +const { HARDCODED_SECRET } = require('../vulnerabilities') +const { getRelativePath } = require('../path-line') + +const secretRules = require('./hardcoded-secrets-rules') + +class HardcodedSecretAnalyzer extends Analyzer { + constructor () { + super(HARDCODED_SECRET) + } + + onConfigure () { + this.addSub('datadog:secrets:result', (secrets) => { this.analyze(secrets) }) + } + + analyze (secrets) { + if (!secrets?.file || !secrets.literals) return + + const matches = secrets.literals + .filter(literal => literal.value && literal.locations?.length) + .map(literal => { + const match = secretRules.find(rule => literal.value.match(rule.regex)) + + return match ? { locations: literal.locations, ruleId: match.id } : undefined + }) + .filter(match => !!match) + + if (matches.length) { + const file = getRelativePath(secrets.file) + + matches.forEach(match => { + match.locations + .filter(location => location.line) + .forEach(location => this._report({ + file, + line: location.line, + column: location.column, + data: match.ruleId + })) + }) + } + } + + _getEvidence (value) { + return { value: `${value.data}` } + } + + _getLocation (value) { + return { + path: value.file, + line: value.line, + column: value.column, + isInternal: false + } + } +} + +module.exports = new HardcodedSecretAnalyzer() diff --git a/packages/dd-trace/src/appsec/iast/analyzers/hardcoded-secrets-rules.js b/packages/dd-trace/src/appsec/iast/analyzers/hardcoded-secrets-rules.js new file mode 100644 index 00000000000..b6069585e5c --- /dev/null +++ b/packages/dd-trace/src/appsec/iast/analyzers/hardcoded-secrets-rules.js @@ -0,0 +1,269 @@ +/* eslint-disable max-len */ +'use strict' + +module.exports = [ + { + 'id': 'adobe-client-secret', + 'regex': /\b((p8e-)[a-z0-9]{32})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'age-secret-key', + 'regex': /AGE-SECRET-KEY-1[QPZRY9X8GF2TVDW0S3JN54KHCE6MUA7L]{58}/ + }, + { + 'id': 'alibaba-access-key-id', + 'regex': /\b((LTAI)[a-z0-9]{20})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'authress-service-client-access-key', + 'regex': /\b((?:sc|ext|scauth|authress)_[a-z0-9]{5,30}\.[a-z0-9]{4,6}\.acc[_-][a-z0-9-]{10,32}\.[a-z0-9+/_=-]{30,120})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'aws-access-token', + 'regex': /\b((A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16})(?:['"\s\x60;]|$)/ + }, + { + 'id': 'clojars-api-token', + 'regex': /(CLOJARS_)[a-z0-9]{60}/i + }, + { + 'id': 'databricks-api-token', + 'regex': /\b(dapi[a-h0-9]{32})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'digitalocean-access-token', + 'regex': 
/\b(doo_v1_[a-f0-9]{64})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'digitalocean-pat', + 'regex': /\b(dop_v1_[a-f0-9]{64})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'digitalocean-refresh-token', + 'regex': /\b(dor_v1_[a-f0-9]{64})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'doppler-api-token', + 'regex': /(dp\.pt\.)[a-z0-9]{43}/i + }, + { + 'id': 'duffel-api-token', + 'regex': /duffel_(test|live)_[a-z0-9_\-=]{43}/i + }, + { + 'id': 'dynatrace-api-token', + 'regex': /dt0c01\.[a-z0-9]{24}\.[a-z0-9]{64}/i + }, + { + 'id': 'easypost-api-token', + 'regex': /\bEZAK[a-z0-9]{54}/i + }, + { + 'id': 'flutterwave-public-key', + 'regex': /FLWPUBK_TEST-[a-h0-9]{32}-X/i + }, + { + 'id': 'frameio-api-token', + 'regex': /fio-u-[a-z0-9\-_=]{64}/i + }, + { + 'id': 'gcp-api-key', + 'regex': /\b(AIza[0-9a-z\-_]{35})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'github-app-token', + 'regex': /(ghu|ghs)_[0-9a-zA-Z]{36}/ + }, + { + 'id': 'github-fine-grained-pat', + 'regex': /github_pat_[0-9a-zA-Z_]{82}/ + }, + { + 'id': 'github-oauth', + 'regex': /gho_[0-9a-zA-Z]{36}/ + }, + { + 'id': 'github-pat', + 'regex': /ghp_[0-9a-zA-Z]{36}/ + }, + { + 'id': 'gitlab-pat', + 'regex': /glpat-[0-9a-zA-Z\-_]{20}/ + }, + { + 'id': 'gitlab-ptt', + 'regex': /glptt-[0-9a-f]{40}/ + }, + { + 'id': 'gitlab-rrt', + 'regex': /GR1348941[0-9a-zA-Z\-_]{20}/ + }, + { + 'id': 'grafana-api-key', + 'regex': /\b(eyJrIjoi[a-z0-9]{70,400}={0,2})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'grafana-cloud-api-token', + 'regex': /\b(glc_[a-z0-9+/]{32,400}={0,2})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'grafana-service-account-token', + 'regex': /\b(glsa_[a-z0-9]{32}_[a-f0-9]{8})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'hashicorp-tf-api-token', + 'regex': /[a-z0-9]{14}\.atlasv1\.[a-z0-9\-_=]{60,70}/i + }, + { + 'id': 'jwt', + 'regex': /\b(ey[a-zA-Z0-9]{17,}\.ey[a-zA-Z0-9/_-]{17,}\.(?:[a-zA-Z0-9/_-]{10,}={0,2})?)(?:['"\s\x60;]|$)/ + }, + { + 'id': 'linear-api-key', + 'regex': /lin_api_[a-z0-9]{40}/i + }, + { + 'id': 'npm-access-token', + 'regex': /\b(npm_[a-z0-9]{36})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'openai-api-key', + 'regex': /\b(sk-[a-z0-9]{20}T3BlbkFJ[a-z0-9]{20})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'planetscale-api-token', + 'regex': /\b(pscale_tkn_[a-z0-9=\-_.]{32,64})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'planetscale-oauth-token', + 'regex': /\b(pscale_oauth_[a-z0-9=\-_.]{32,64})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'planetscale-password', + 'regex': /\b(pscale_pw_[a-z0-9=\-_.]{32,64})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'postman-api-token', + 'regex': /\b(PMAK-[a-f0-9]{24}-[a-f0-9]{34})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'prefect-api-token', + 'regex': /\b(pnu_[a-z0-9]{36})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'private-key', + 'regex': /-----BEGIN[ A-Z0-9_-]{0,100}PRIVATE KEY( BLOCK)?-----[\s\S]*KEY( BLOCK)?----/i + }, + { + 'id': 'pulumi-api-token', + 'regex': /\b(pul-[a-f0-9]{40})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'pypi-upload-token', + 'regex': /pypi-AgEIcHlwaS5vcmc[A-Za-z0-9\-_]{50,1000}/ + }, + { + 'id': 'readme-api-token', + 'regex': /\b(rdme_[a-z0-9]{70})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'rubygems-api-token', + 'regex': /\b(rubygems_[a-f0-9]{48})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'scalingo-api-token', + 'regex': /tk-us-[a-zA-Z0-9-_]{48}/ + }, + { + 'id': 'sendgrid-api-token', + 'regex': /\b(SG\.[a-z0-9=_\-.]{66})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'sendinblue-api-token', + 'regex': /\b(xkeysib-[a-f0-9]{64}-[a-z0-9]{16})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'shippo-api-token', + 'regex': /\b(shippo_(live|test)_[a-f0-9]{40})(?:['"\s\x60;]|$)/i + }, + { + 'id': 
'shopify-access-token', + 'regex': /shpat_[a-fA-F0-9]{32}/ + }, + { + 'id': 'shopify-custom-access-token', + 'regex': /shpca_[a-fA-F0-9]{32}/ + }, + { + 'id': 'shopify-private-app-access-token', + 'regex': /shppa_[a-fA-F0-9]{32}/ + }, + { + 'id': 'shopify-shared-secret', + 'regex': /shpss_[a-fA-F0-9]{32}/ + }, + { + 'id': 'slack-app-token', + 'regex': /(xapp-\d-[A-Z0-9]+-\d+-[a-z0-9]+)/i + }, + { + 'id': 'slack-bot-token', + 'regex': /(xoxb-[0-9]{10,13}-[0-9]{10,13}[a-zA-Z0-9-]*)/ + }, + { + 'id': 'slack-config-access-token', + 'regex': /(xoxe.xox[bp]-\d-[A-Z0-9]{163,166})/i + }, + { + 'id': 'slack-config-refresh-token', + 'regex': /(xoxe-\d-[A-Z0-9]{146})/i + }, + { + 'id': 'slack-legacy-bot-token', + 'regex': /(xoxb-[0-9]{8,14}-[a-zA-Z0-9]{18,26})/ + }, + { + 'id': 'slack-legacy-token', + 'regex': /(xox[os]-\d+-\d+-\d+-[a-fA-F\d]+)/ + }, + { + 'id': 'slack-legacy-workspace-token', + 'regex': /(xox[ar]-(?:\d-)?[0-9a-zA-Z]{8,48})/ + }, + { + 'id': 'slack-user-token', + 'regex': /(xox[pe](?:-[0-9]{10,13}){3}-[a-zA-Z0-9-]{28,34})/ + }, + { + 'id': 'slack-webhook-url', + 'regex': /(https?:\/\/)?hooks.slack.com\/(services|workflows)\/[A-Za-z0-9+/]{43,46}/ + }, + { + 'id': 'square-access-token', + 'regex': /\b(sq0atp-[0-9a-z\-_]{22})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'square-secret', + 'regex': /\b(sq0csp-[0-9a-z\-_]{43})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'stripe-access-token', + 'regex': /(sk|pk)_(test|live)_[0-9a-z]{10,32}/i + }, + { + 'id': 'telegram-bot-api-token', + 'regex': /(?:^|[^0-9])([0-9]{5,16}:A[a-z0-9_-]{34})(?:$|[^a-z0-9_-])/i + }, + { + 'id': 'twilio-api-key', + 'regex': /SK[0-9a-fA-F]{32}/ + }, + { + 'id': 'vault-batch-token', + 'regex': /\b(hvb\.[a-z0-9_-]{138,212})(?:['"\s\x60;]|$)/i + }, + { + 'id': 'vault-service-token', + 'regex': /\b(hvs\.[a-z0-9_-]{90,100})(?:['"\s\x60;]|$)/i + } +] diff --git a/packages/dd-trace/src/appsec/iast/path-line.js b/packages/dd-trace/src/appsec/iast/path-line.js index 5c24c916a88..8a262e04976 100644 --- a/packages/dd-trace/src/appsec/iast/path-line.js +++ b/packages/dd-trace/src/appsec/iast/path-line.js @@ -6,6 +6,7 @@ const { calculateDDBasePath } = require('../../util') const pathLine = { getFirstNonDDPathAndLine, getNodeModulesPaths, + getRelativePath, getFirstNonDDPathAndLineFromCallsites, // Exported only for test purposes calculateDDBasePath, // Exported only for test purposes ddBasePath: calculateDDBasePath(__dirname) // Only for test purposes @@ -45,7 +46,7 @@ function getFirstNonDDPathAndLineFromCallsites (callsites, externallyExcludedPat const filepath = callsite.getFileName() if (!isExcluded(callsite, externallyExcludedPaths) && filepath.indexOf(pathLine.ddBasePath) === -1) { return { - path: path.relative(process.cwd(), filepath), + path: getRelativePath(filepath), line: callsite.getLineNumber(), column: callsite.getColumnNumber(), isInternal: !path.isAbsolute(filepath) @@ -56,6 +57,10 @@ function getFirstNonDDPathAndLineFromCallsites (callsites, externallyExcludedPat return null } +function getRelativePath (filepath) { + return path.relative(process.cwd(), filepath) +} + function isExcluded (callsite, externallyExcludedPaths) { if (callsite.isNative()) return true const filename = callsite.getFileName() diff --git a/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter.js b/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter.js index 2df59709ec5..dda4a65e122 100644 --- a/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter.js +++ b/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter.js @@ -7,7 +7,9 @@ const 
{ isPrivateModule, isNotLibraryFile } = require('./filter') const { csiMethods } = require('./csi-methods') const { getName } = require('../telemetry/verbosity') const { getRewriteFunction } = require('./rewriter-telemetry') +const dc = require('../../../../../diagnostics_channel') +const hardcodedSecretCh = dc.channel('datadog:secrets:result') let rewriter let getPrepareStackTrace @@ -50,7 +52,11 @@ function getRewriter (telemetryVerbosity) { getGetOriginalPathAndLineFromSourceMapFunction(chainSourceMap, getOriginalPathAndLineFromSourceMap) } - rewriter = new Rewriter({ csiMethods, telemetryVerbosity: getName(telemetryVerbosity), chainSourceMap }) + rewriter = new Rewriter({ + csiMethods, + telemetryVerbosity: getName(telemetryVerbosity), + chainSourceMap + }) } catch (e) { iastLog.error('Unable to initialize TaintTracking Rewriter') .errorAndPublish(e) @@ -80,7 +86,12 @@ function getCompileMethodFn (compileMethod) { try { if (isPrivateModule(filename) && isNotLibraryFile(filename)) { const rewritten = rewriteFn(content, filename) - if (rewritten && rewritten.content) { + + if (rewritten?.literalsResult && hardcodedSecretCh.hasSubscribers) { + hardcodedSecretCh.publish(rewritten.literalsResult) + } + + if (rewritten?.content) { return compileMethod.apply(this, [rewritten.content, filename]) } } diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities.js b/packages/dd-trace/src/appsec/iast/vulnerabilities.js index d885d3406b0..1815bd4e201 100644 --- a/packages/dd-trace/src/appsec/iast/vulnerabilities.js +++ b/packages/dd-trace/src/appsec/iast/vulnerabilities.js @@ -1,5 +1,6 @@ module.exports = { COMMAND_INJECTION: 'COMMAND_INJECTION', + HARDCODED_SECRET: 'HARDCODED_SECRET', HSTS_HEADER_MISSING: 'HSTS_HEADER_MISSING', INSECURE_COOKIE: 'INSECURE_COOKIE', LDAP_INJECTION: 'LDAP_INJECTION', diff --git a/packages/dd-trace/test/appsec/iast/analyzers/hardcoded-secret-analyzer.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/hardcoded-secret-analyzer.spec.js new file mode 100644 index 00000000000..9a8533dfd45 --- /dev/null +++ b/packages/dd-trace/test/appsec/iast/analyzers/hardcoded-secret-analyzer.spec.js @@ -0,0 +1,114 @@ +'use strict' + +const path = require('path') +const fs = require('fs') +const os = require('os') + +const agent = require('../../../plugins/agent') +const Config = require('../../../../src/config') + +const hardcodedSecretAnalyzer = require('../../../../src/appsec/iast/analyzers/hardcoded-secret-analyzer') +const { suite } = require('./resources/hardcoded-secrets-suite.json') +const iast = require('../../../../src/appsec/iast') + +describe('Hardcoded Secret Analyzer', () => { + describe('unit test', () => { + const relFile = path.join('path', 'to', 'file.js') + const file = path.join(process.cwd(), relFile) + const line = 42 + const column = 3 + + let report + beforeEach(() => { + report = sinon.stub(hardcodedSecretAnalyzer, '_report') + }) + + afterEach(sinon.restore) + + suite.forEach((testCase) => { + testCase.samples.forEach(sample => { + it(`should match rule ${testCase.id} with value ${sample}`, () => { + hardcodedSecretAnalyzer.analyze({ + file, + literals: [{ + value: sample, + locations: [{ + line, + column + }] + }] + }) + + expect(report).to.have.been.calledOnceWithExactly({ file: relFile, line, column, data: testCase.id }) + }) + }) + }) + + it('should not fail with an malformed secrets', () => { + expect(() => hardcodedSecretAnalyzer.analyze(undefined)).not.to.throw() + expect(() => hardcodedSecretAnalyzer.analyze({ file: undefined })).not.to.throw() + 
expect(() => hardcodedSecretAnalyzer.analyze({ file, literals: undefined })).not.to.throw() + expect(() => hardcodedSecretAnalyzer.analyze({ file, literals: [{ value: undefined }] })).not.to.throw() + expect(() => hardcodedSecretAnalyzer.analyze({ file, literals: [{ value: 'test' }] })).not.to.throw() + }) + + it('should not report secrets in line 0', () => { + hardcodedSecretAnalyzer.analyze({ + file, + literals: [{ value: 'test', line: 0 }] + }) + + expect(report).not.to.have.been.called + }) + }) + + describe('full feature', () => { + const filename = 'hardcoded-secret-functions' + const functionsPath = path.join(os.tmpdir(), filename) + + before(() => { + fs.copyFileSync(path.join(__dirname, 'resources', `${filename}.js`), functionsPath) + }) + + after(() => { + fs.unlinkSync(functionsPath) + }) + + describe('with iast enabled', () => { + beforeEach(() => { + return agent.load(undefined, undefined, { flushInterval: 1 }) + }) + + beforeEach(() => { + const tracer = require('../../../../') + iast.enable(new Config({ + experimental: { + iast: { + enabled: true, + requestSampling: 100 + } + } + }), tracer) + }) + + afterEach(() => { + iast.disable() + }) + + afterEach(() => { + return agent.close({ ritmReset: false }) + }) + + it('should detect vulnerability', (done) => { + agent + .use(traces => { + expect(traces[0][0].meta['_dd.iast.json']).to.include('"HARDCODED_SECRET"') + }) + .then(done) + .catch(done) + + require(functionsPath) + }) + }) + }) +}) diff --git a/packages/dd-trace/test/appsec/iast/analyzers/resources/hardcoded-secret-functions.js b/packages/dd-trace/test/appsec/iast/analyzers/resources/hardcoded-secret-functions.js new file mode 100644 index 00000000000..a9df77503b0 --- /dev/null +++ b/packages/dd-trace/test/appsec/iast/analyzers/resources/hardcoded-secret-functions.js @@ -0,0 +1,7 @@ +'use strict' + +const secret = 'A3TMAWZUKIWR6O0OGR7B' + +module.exports = { + secret +} diff --git a/packages/dd-trace/test/appsec/iast/analyzers/resources/hardcoded-secrets-suite.json b/packages/dd-trace/test/appsec/iast/analyzers/resources/hardcoded-secrets-suite.json new file mode 100644 index 00000000000..385be9bf4b9 --- /dev/null +++ b/packages/dd-trace/test/appsec/iast/analyzers/resources/hardcoded-secrets-suite.json @@ -0,0 +1,269 @@ +{ + "version": "1.0", + "suite": [ + { + "id": "adobe-client-secret", + "samples": ["p8e-042c420E161f7DcF56Bc23414b5Bd9C0"] + }, + { + "id": "age-secret-key", + "samples": ["AGE-SECRET-KEY-1QQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQ"] + }, + { + "id": "alibaba-access-key-id", + "samples": ["LTAIdmr29cel83fy2lyyeovp"] + }, + { + "id": "authress-service-client-access-key", + "samples": ["sc_z44wdom1zo8rju28l.r9t1b.acc_rpml1az3z4osyg7x2vjphgt26z42dq8.7+g1dba23g6rglng38i-vz4l+xsbi/2k+t24-jx4ww2bpezxp1bkj_rhg+6uy-t7sww2yxgultr=_2+6=90v9sa=cuyescgxc9y7w/-7vw=5vsks-aw", "ext_l889tuwc8bchl45cecib53al.ipvif.acc-qtpje8wk-k75.u4s1xhn/6u9u=zumfc86/z2/y5gu6k4_=v8464ac7i0aw4gle-8jdcw9cwd6b7ew5/vr65uqb", "scauth_8z8yn4w4o3os5798yjxhaadei.i0rmr.acc_hld57ogjggplejcwq4ci.74=y9s4y+ocm=mx8fcmdqb8y6--wzl1kk3pwfjb_r0l7o-g=u5m55gn5+o69sh=z26bk/a2+bo045bk78ac9-+ueqy05rhtj", "authress_xet76brco0osua5d9bqedw.l75lm8.acc-56pcd8z589v3vfcw4v3y.fbexbmkab4fe_+vdu/_qfdtvmc0jnb3xma38mudys9/js1zlwvn28jmbwxk=1a1-ax7/h2"] + }, + { + "id": "aws-access-token", + "samples": ["A3TMAWZUKIWR6O0OGR7B", "AKIACGDXGKON38QRPSN6", "AGPA7L8NOOMJ8AR1TTNF", "AIDAUNJX3LKHQJGAQ6MS", "AROATVD0Y20OIK7VJPCL", "AIPA7AXHEAXF7EFJREEJ", "ANPA91BE6VSIUANWG4DY", "ANVA8M4EPVQU0XVE1G3L", 
"ASIAPLKQBPE8T9JT1KBV"] + }, + { + "id": "clojars-api-token", + "samples": ["CLOJARS_93m8jgicwpny2zbzbzyk0id7pg7h64zd41m4skb5lrwtkl5k941hsob1zosd"] + }, + { + "id": "databricks-api-token", + "samples": ["dapiad55e7agdd5469e223cf3eab02042cc6"] + }, + { + "id": "digitalocean-access-token", + "samples": ["doo_v1_3a61580fc6e8053e6f00a533dd53de2a347ad0fa94d47ab12e36a5671ce1c7b7"] + }, + { + "id": "digitalocean-pat", + "samples": ["dop_v1_3ef76a17874a2506cc60646c5d4ff6ab96d5866341500b98a80af0c91cc2fb1b"] + }, + { + "id": "digitalocean-refresh-token", + "samples": ["dor_v1_ef583b09000185cbea95598892ac77c2f035f29cd73fff20b076cc2e178abeb2"] + }, + { + "id": "doppler-api-token", + "samples": ["dp.pt.5esgp01d2s5mtsv9vbq9ong5jksv6e70cdsjbixxdyn"] + }, + { + "id": "duffel-api-token", + "samples": ["duffel_test_z-8nmbsiyv6t_y9figh0velran-np582=tilqgk7z2e", "duffel_live_kamqmtr-s1eu0qn=de65zdtqpy2b8na6a4g=bub8o_k"] + }, + { + "id": "dynatrace-api-token", + "samples": ["dt0c01.nyw93k1t6k4gr5az2oceyjs6.m8uc8qz3ibz90hvur8079rfrld95fvdgz0rmhq84enj4lzbgrthl29k42c14q4yw"] + }, + { + "id": "easypost-api-token", + "samples": ["EZAKtka0inaa62wilgvc2ovo2n0pxteydnkyx61paqiggl6hxwr5f2yqci"] + }, + { + "id": "flutterwave-public-key", + "samples": ["FLWPUBK_TEST-c22b4eg81182hhdagchd0016abe1f472-X"] + }, + { + "id": "frameio-api-token", + "samples": ["fio-u-3x7rov97qo13bdr=3mlza00b5bya5x50mfz6lx_xtoce6b=_4uvou_qc-6zn-9b5"] + }, + { + "id": "gcp-api-key", + "samples": ["AIza1RvSeh9Ni9feYDm4gEH3R8NkbvUzdgbLGap"] + }, + { + "id": "github-app-token", + "samples": ["ghu_OCfEKaeP0l5vmtP9aI2BuSaz2keLqTISTtyJ", "ghs_TIFULl9l1YQvBDbcgMWsGCqVto6DcBTV5Zh0"] + }, + { + "id": "github-fine-grained-pat", + "samples": ["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"] + }, + { + "id": "github-oauth", + "samples": ["gho_RK5eFZyEy5bHC7LKmlpxsP9BmuqwkjkKNTeR"] + }, + { + "id": "github-pat", + "samples": ["ghp_5w9Qp0PNLroCShgHLK8T4aQJdbK2yjnYo9Y8"] + }, + { + "id": "gitlab-pat", + "samples": ["glpat-IKx0e0mpgRgyHEHChuhi"] + }, + { + "id": "gitlab-ptt", + "samples": ["glptt-6538261e6952d9167791839d18f35d80b9454719"] + }, + { + "id": "gitlab-rrt", + "samples": ["GR1348941uSeVnS3DcR0GedeOoqFs"] + }, + { + "id": "grafana-api-key", + "samples": ["eyJrIjoilkFWbNTrH6ZBwDWV6tp4kH8gVeZBj4EaIZ1QIh4nt4E3lmbJ1bbq8hReljA7xSIN183XLFNYyBKqDF4sDPKHcYblIXoih7zbv0BmtE8vOC27WkbgoR3iHBudhSOyao10Mt4MDS99RY3ageOh6I4PDQUuGiFbzJUjLBunqOdWS4RtIU7CDq2Sulw5ZKFfNhJHDTOetl1rpi7J9klwkCXIQCzkOzNfJ7JuvJpzGcTbzLnLon6yiT4KKA0BdnQ3XVirZMYz5wpIt5TFYn6l3Z7zD8kuToVcMZDdn6wbPH2vnvaOrvmzrpA8YsnalngRaN6U1zcc0eVwgLLSiVDhBguhYFF4SWzmllVazFW1S"] + }, + { + "id": "grafana-cloud-api-token", + "samples": ["glc_NUOcNiW3Ql+Q6dZ8xwjKfpuL72xdd/MP4ijNKo3X8flBnWb9bJlHv+fjlDhXx4aRZuXX6LxJ4LOAEtjka0BTZEf5D/oHHgmDEUo5+DCPYcOkwee4f8893G3UPdU6jqCXlK7wEqlXxy7917IkhwnKjpM+cTpgvHsua3+mjuz1XlLEL6eUVvZzC4z899UwAEnpGNi+8iBz8yy+dHyh1VLAu5rhDComZC0S9i++XgXytm2QF1e1Ky7r5n7MhA665bZ6+gA7J0JU9ZSHDacOjzcmGIZWmxcXld5SN7Sk0jsVpnufUzJGksWG/yz1HqUXYyvggA6HI4Z/GG9fa5BznVy4GzSbHccKOV833N/U5C+sTX/40UbOphvD/3Zttf/rbPxxKGnM5s6lX"] + }, + { + "id": "grafana-service-account-token", + "samples": ["glsa_tyZkxBNfrk31AyRmYCRQDkMstrOU6mJq_a27a7E2E"] + }, + { + "id": "hashicorp-tf-api-token", + "samples": ["cgjiwnvcstc3tf.atlasv1.17e58vdqi_2fpmt7ycmuastd627exxc3ulxbd7lg_3bk8ji3a0r3ixxjob_g0=5x9="] + }, + { + "id": "jwt", + "samples": ["eygSDWUVjIpApmLFgg5js.ey7TxRd7iF-8Cx6NgcI6T-dcv0.1wQL45DMDTA=", "ey0NOJt93t9zeJbBtD2wI.eyoLE6hd_XC6SSVl3xAEb1pHwq."] + }, + { + "id": 
"linear-api-key", + "samples": ["lin_api_l4ak83b2jsjf1qjae5dzn45v7com0lzihn3h8qrc"] + }, + { + "id": "npm-access-token", + "samples": ["npm_5qxvhxa03dxiteq9pm6pzc73b7nxcbxnacsn"] + }, + { + "id": "openai-api-key", + "samples": ["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"] + }, + { + "id": "planetscale-api-token", + "samples": ["pscale_tkn_crpb04n.ck=r.-d-8m=-0..fn7xz36eja0ap886d__hgd3ld"] + }, + { + "id": "planetscale-oauth-token", + "samples": ["pscale_oauth_be0oc6crk27h2kjg0qn=1cau6_256bxvtu2"] + }, + { + "id": "planetscale-password", + "samples": ["pscale_pw_bjem8uv5og17orsga=h454jyjqetfy615duagc7j=s8l_kr.glbh__.la"] + }, + { + "id": "postman-api-token", + "samples": ["PMAK-1797993c98e59a0f9c09bc3b-e4dce5c2256119617285a763e2fb63ee75"] + }, + { + "id": "prefect-api-token", + "samples": ["pnu_knaxhdfqlx5rhxqiwxodq4w50nc25prw0kqe"] + }, + { + "id": "private-key", + "samples": ["-----BEGINF4PDVGVUG99E46VLEP4OQ982F7WHB-NUUGBA8-6E187SG61F0JY7JHGI7Z90PRIVATE KEY----- - KEY----"] + }, + { + "id": "pulumi-api-token", + "samples": ["pul-8437f190bb1e04414d15b87af38cd68dff596dbf"] + }, + { + "id": "pypi-upload-token", + "samples": ["pypi-AgEIcHlwaS5vcmcerQnhfw1KFC_Xv-2lgxlzJTNQ7C8qpZJGGpPlZ1FIUEr3N-mwINvTx1UY"] + }, + { + "id": "readme-api-token", + "samples": ["rdme_nuq3gsy66jd0jhyyqrcwmb8pgtqx4ljl4tbjxlfta9lzsa5acwlmj1zlwawxcabua250fo"] + }, + { + "id": "rubygems-api-token", + "samples": ["rubygems_cf23f5e8c403da8bb754452198555cc743ca5cf3fb194073"] + }, + { + "id": "scalingo-api-token", + "samples": ["tk-us-TX4DXzB0cNC_KnbIL8N14v8GhrJqH2LelLaxqy3mVmRH4MY3"] + }, + { + "id": "sendgrid-api-token", + "samples": ["SG.rbsi3jp7ont7glhbo6.i0kicrd10qzut17d5e8royv9cpnw9ttu5504g7p50v3q36."] + }, + { + "id": "sendinblue-api-token", + "samples": ["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"] + }, + { + "id": "shippo-api-token", + "samples": ["shippo_live_af42cbb904f27f042607741b265c6ccdb96ff56d", "shippo_test_e50a31d2632326072aff8cd1b073ae79ca39ba8e"] + }, + { + "id": "shopify-access-token", + "samples": ["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"] + }, + { + "id": "shopify-custom-access-token", + "samples": ["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"] + }, + { + "id": "shopify-private-app-access-token", + "samples": ["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"] + }, + { + "id": "shopify-shared-secret", + "samples": ["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"] + }, + { + "id": "slack-app-token", + "samples": ["xapp-2-TQRU-2-pi9b"] + }, + { + "id": "slack-bot-token", + "samples": ["xoxb-98990225585-12353152514"] + }, + { + "id": "slack-config-access-token", + "samples": ["xoxe.xoxb-9-PMWS1VD0PB4OA3DK5YFEHVCZTBHO5S70C0W8QCFXX2AQ0WH8L8DMO223K68ZNY56EDMKHF42SSQPUV974JD6PULK73YTDQJ86EMWJ71FEL36LNJY7B5EV9RT2CMNQKYCIRSPINX7R72D0O8UT9WCUEN7X23HUNZ3W1Y", "xoxe.xoxp-8-PTN8WYGN3ND14QYWSDEV2QTQMEI8R90FZ0UCZWRY0J9QE2TIEAIUYJA9A7VSQI64X7CD6W75WQ4SQDLTV93R0GRPZUB1B9HSNF3G0PLSDN8HPST7VB6QOODRQWNMUF4WILELJE3GYLONHJAHBEG9D0E8BM0EB887SM8DNH"] + }, + { + "id": "slack-config-refresh-token", + "samples": ["xoxe-4-02DA0JGWGLP95TSHFK7OYEZMQ45K733RZ0M8B7KCTS5XS8X3Q5I70CTWC0U6N6YNR8JFGTGH944GIC4RHC3BU61XG05LNNZX3RGUYRW7YA75HNNA5CZLCX2UHW9PX54WFHN16K2ZP6MU95Q3N5"] + }, + { + "id": "slack-legacy-bot-token", + "samples": ["xoxb-2635165305-qYktSa9LhLqa2jsoJuk"] + }, + { + "id": "slack-legacy-token", + "samples": ["xoxs-6-8-0-E3bc3"] + }, + { + "id": "slack-legacy-workspace-token", + "samples": ["xoxa-0-syVLBd4iVIadCxpG9rPMLP1FvrRs11UmaulCnXp", "xoxr-wPFyQFwKOTbIkg0DwggalChTNO28bNjc23fW9tO"] 
+ }, + { + "id": "slack-user-token", + "samples": ["xoxe-6679527551-6986334739-864697748900-426-jwW3zxCLz3VcIsJq-ZgF8t8UdsfK"] + }, + { + "id": "slack-webhook-url", + "samples": ["https://hooks.slack.com/services/a5ixwCnZkBl/sRAEeDcbQP+JDmyJpdtNgypWZYuyNgi", "https://hooks.slack.com/workflows/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"] + }, + { + "id": "square-access-token", + "samples": ["sq0atp-S_gaYllSJ9Oc7Ek8i_2FTZ"] + }, + { + "id": "square-secret", + "samples": ["sq0csp-W2DDXterE69EiRm8U_G3TqVw2X6-aEZ1uEAYKYEahKA"] + }, + { + "id": "stripe-access-token", + "samples": ["sk_test_z01pxuk87iboja60sn9zj", "sk_live_cg98txrpd4nz", "pk_test_0he1pnbtdwhz0c8s025yuokfp1mhh56", "pk_live_kc5idmjc8a"] + }, + { + "id": "telegram-bot-api-token", + "samples": ["52590039:AbM1qhPt10XJ93tkWlM_54otulznq8q9Oed"] + }, + { + "id": "twilio-api-key", + "samples": ["SK7dF46e2e21CcC5A2f1997a4648DAFfbC"] + }, + { + "id": "vault-batch-token", + "samples": ["hvb.b167n7806s59tc4u60znk7n4hmrdcra57y_f2y7dfi9u4j7mtlbrz9bgob0bq5ypsab7ey4i4vhb95-9rew2puf_x431_6zmwub4hai6t5ye1-iuc2wlj7a_7dkl6cj976rp4h1r71jv8bixvjyhkod0ud8e18-q"] + }, + { + "id": "vault-service-token", + "samples": ["hvs.g_700cgmm40xcdyxkp9yjg6qza83ozot-zyk7qaalda0szelje0mxhyadrb0fo_ypgyoqo25p8nfgg9-7wuj_c83byhpol68fnk"] + } + ] +} diff --git a/yarn.lock b/yarn.lock index 88017d7df6a..2d3b7eea1d7 100644 --- a/yarn.lock +++ b/yarn.lock @@ -392,10 +392,10 @@ dependencies: node-gyp-build "^3.9.0" -"@datadog/native-iast-rewriter@2.1.3": - version "2.1.3" - resolved "https://registry.yarnpkg.com/@datadog/native-iast-rewriter/-/native-iast-rewriter-2.1.3.tgz#1964cd856655b9c4d0e144048af59a2e90910901" - integrity sha512-4oxMFz5ZEpOK3pRc9KjquMgkRP6D+oPQVIzOk4dgG8fl2iepHtCa3gna/fQBfdWIiX5a2j65O3R1zNp2ckk8JA== +"@datadog/native-iast-rewriter@2.2.0": + version "2.2.0" + resolved "https://registry.yarnpkg.com/@datadog/native-iast-rewriter/-/native-iast-rewriter-2.2.0.tgz#cfcdfaf128450f7d92a840eee8cd030b9746f49c" + integrity sha512-YrCgLGvOQh3EkWYjqZKpelg60idtMcC/jWskZSdr4KxvF61BM9zp5NF6HeUKON6RHCmqDqFS3wyj1NNRMID1VQ== dependencies: lru-cache "^7.14.0" node-gyp-build "^4.5.0" From 73502109ac11305302e83cdb1e7e97d7a865912d Mon Sep 17 00:00:00 2001 From: Carles Capell <107924659+CarlesDD@users.noreply.github.com> Date: Tue, 31 Oct 2023 08:45:15 +0100 Subject: [PATCH 039/147] Add configurable IAST redaction pattern (#3720) * Make IAST redaction pattern configurables * Fix custom IAST redaction pattern configuration description * Use optional chaining operator in config * Refactor vulnerability formatter custom redaction patterns test * Fix PR comments * Optional chaining for iast experimental root configuration * Fix config options override test * Fix linting --- docs/test.ts | 4 +- index.d.ts | 10 ++- .../evidence-redaction/sensitive-handler.js | 19 +++++ .../iast/vulnerabilities-formatter/index.js | 3 +- .../src/appsec/iast/vulnerability-reporter.js | 6 +- packages/dd-trace/src/config.js | 28 +++++-- .../sensitive-handler.spec.js | 80 +++++++++++++++++++ .../vulnerability-formatter/index.spec.js | 19 +++++ .../iast/vulnerability-reporter.spec.js | 6 +- packages/dd-trace/test/config.spec.js | 18 ++++- 10 files changed, 179 insertions(+), 14 deletions(-) diff --git a/docs/test.ts b/docs/test.ts index 1c81f92173b..56532363b7a 100644 --- a/docs/test.ts +++ b/docs/test.ts @@ -122,7 +122,9 @@ tracer.init({ maxConcurrentRequests: 4, maxContextOperations: 30, deduplicationEnabled: true, - redactionEnabled: true + redactionEnabled: true, + redactionNamePattern: 'password', + 
redactionValuePattern: 'bearer' } } }) diff --git a/index.d.ts b/index.d.ts index da29ed311f2..75c03519f38 100644 --- a/index.d.ts +++ b/index.d.ts @@ -453,7 +453,15 @@ export declare interface TracerOptions { * Whether to enable vulnerability redaction * @default true */ - redactionEnabled?: boolean + redactionEnabled?: boolean, + /** + * Specifies a regex that will redact sensitive source names in vulnerability reports. + */ + redactionNamePattern?: string, + /** + * Specifies a regex that will redact sensitive source values in vulnerability reports. + */ + redactionValuePattern?: string } }; diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler.js b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler.js index 8dcb59f1b45..4641876e934 100644 --- a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler.js +++ b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler.js @@ -1,5 +1,6 @@ 'use strict' +const iastLog = require('../../iast-log') const vulnerabilities = require('../../vulnerabilities') const { contains, intersects, remove } = require('./range-utils') @@ -263,6 +264,24 @@ class SensitiveHandler { valueParts.push({ redacted: true }) } } + + setRedactionPatterns (redactionNamePattern, redactionValuePattern) { + if (redactionNamePattern) { + try { + this._namePattern = new RegExp(redactionNamePattern, 'gmi') + } catch (e) { + iastLog.warn('Redaction name pattern is not valid') + } + } + + if (redactionValuePattern) { + try { + this._valuePattern = new RegExp(redactionValuePattern, 'gmi') + } catch (e) { + iastLog.warn('Redaction value pattern is not valid') + } + } + } } module.exports = new SensitiveHandler() diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/index.js b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/index.js index 29611ccbc59..9dfca76a9e6 100644 --- a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/index.js +++ b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/index.js @@ -8,8 +8,9 @@ class VulnerabilityFormatter { this._redactVulnearbilities = true } - setRedactVulnerabilities (shouldRedactVulnerabilities) { + setRedactVulnerabilities (shouldRedactVulnerabilities, redactionNamePattern, redactionValuePattern) { this._redactVulnearbilities = shouldRedactVulnerabilities + sensitiveHandler.setRedactionPatterns(redactionNamePattern, redactionValuePattern) } extractSourcesFromVulnerability (vulnerability) { diff --git a/packages/dd-trace/src/appsec/iast/vulnerability-reporter.js b/packages/dd-trace/src/appsec/iast/vulnerability-reporter.js index 4041a25cc96..cd6bf5f1180 100644 --- a/packages/dd-trace/src/appsec/iast/vulnerability-reporter.js +++ b/packages/dd-trace/src/appsec/iast/vulnerability-reporter.js @@ -95,7 +95,11 @@ function deduplicateVulnerabilities (vulnerabilities) { function start (config, _tracer) { deduplicationEnabled = config.iast.deduplicationEnabled - vulnerabilitiesFormatter.setRedactVulnerabilities(config.iast.redactionEnabled) + vulnerabilitiesFormatter.setRedactVulnerabilities( + config.iast.redactionEnabled, + config.iast.redactionNamePattern, + config.iast.redactionValuePattern + ) if (deduplicationEnabled) { startClearCacheTimer() } diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index 8d44e59f91b..fc416f2bd8b 100644 --- a/packages/dd-trace/src/config.js +++ 
b/packages/dd-trace/src/config.js @@ -447,7 +447,7 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) 5 // seconds ) - const iastOptions = options.experimental && options.experimental.iast + const iastOptions = options?.experimental?.iast const DD_IAST_ENABLED = coalesce( iastOptions && (iastOptions === true || iastOptions.enabled === true), @@ -461,7 +461,7 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) const defaultIastRequestSampling = 30 const iastRequestSampling = coalesce( - parseInt(iastOptions && iastOptions.requestSampling), + parseInt(iastOptions?.requestSampling), parseInt(process.env.DD_IAST_REQUEST_SAMPLING), defaultIastRequestSampling ) @@ -469,31 +469,43 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) iastRequestSampling > 100 ? defaultIastRequestSampling : iastRequestSampling const DD_IAST_MAX_CONCURRENT_REQUESTS = coalesce( - parseInt(iastOptions && iastOptions.maxConcurrentRequests), + parseInt(iastOptions?.maxConcurrentRequests), parseInt(process.env.DD_IAST_MAX_CONCURRENT_REQUESTS), 2 ) const DD_IAST_MAX_CONTEXT_OPERATIONS = coalesce( - parseInt(iastOptions && iastOptions.maxContextOperations), + parseInt(iastOptions?.maxContextOperations), parseInt(process.env.DD_IAST_MAX_CONTEXT_OPERATIONS), 2 ) const DD_IAST_DEDUPLICATION_ENABLED = coalesce( - iastOptions && iastOptions.deduplicationEnabled, + iastOptions?.deduplicationEnabled, process.env.DD_IAST_DEDUPLICATION_ENABLED && isTrue(process.env.DD_IAST_DEDUPLICATION_ENABLED), true ) const DD_IAST_REDACTION_ENABLED = coalesce( - iastOptions && iastOptions.redactionEnabled, + iastOptions?.redactionEnabled, !isFalse(process.env.DD_IAST_REDACTION_ENABLED), true ) + const DD_IAST_REDACTION_NAME_PATTERN = coalesce( + iastOptions?.redactionNamePattern, + process.env.DD_IAST_REDACTION_NAME_PATTERN, + null + ) + + const DD_IAST_REDACTION_VALUE_PATTERN = coalesce( + iastOptions?.redactionValuePattern, + process.env.DD_IAST_REDACTION_VALUE_PATTERN, + null + ) + const DD_IAST_TELEMETRY_VERBOSITY = coalesce( - iastOptions && iastOptions.telemetryVerbosity, + iastOptions?.telemetryVerbosity, process.env.DD_IAST_TELEMETRY_VERBOSITY, 'INFORMATION' ) @@ -620,6 +632,8 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) 
maxContextOperations: DD_IAST_MAX_CONTEXT_OPERATIONS, deduplicationEnabled: DD_IAST_DEDUPLICATION_ENABLED, redactionEnabled: DD_IAST_REDACTION_ENABLED, + redactionNamePattern: DD_IAST_REDACTION_NAME_PATTERN, + redactionValuePattern: DD_IAST_REDACTION_VALUE_PATTERN, telemetryVerbosity: DD_IAST_TELEMETRY_VERBOSITY } diff --git a/packages/dd-trace/test/appsec/iast/vulnerability-formatter/evidence-redaction/sensitive-handler.spec.js b/packages/dd-trace/test/appsec/iast/vulnerability-formatter/evidence-redaction/sensitive-handler.spec.js index bde517eb992..a9c1ae465ce 100644 --- a/packages/dd-trace/test/appsec/iast/vulnerability-formatter/evidence-redaction/sensitive-handler.spec.js +++ b/packages/dd-trace/test/appsec/iast/vulnerability-formatter/evidence-redaction/sensitive-handler.spec.js @@ -2,6 +2,8 @@ const sensitiveHandler = require('../../../../../src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler') +const { DEFAULT_IAST_REDACTION_NAME_PATTERN, DEFAULT_IAST_REDACTION_VALUE_PATTERN } = + require('../../../../../src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-regex') const { suite } = require('../resources/evidence-redaction-suite.json') @@ -18,6 +20,84 @@ function doTest (testCase, parameter) { } describe('Sensitive handler', () => { + beforeEach(() => { + sensitiveHandler.setRedactionPatterns(DEFAULT_IAST_REDACTION_NAME_PATTERN, DEFAULT_IAST_REDACTION_VALUE_PATTERN) + }) + + describe('Custom redaction patterns', () => { + describe('Default redaction patterns', () => { + it('should use default patterns when null ones are set', () => { + sensitiveHandler.setRedactionPatterns(null, null) + expect(sensitiveHandler._namePattern.source).to.be.equals(DEFAULT_IAST_REDACTION_NAME_PATTERN) + expect(sensitiveHandler._valuePattern.source).to.be.equals(DEFAULT_IAST_REDACTION_VALUE_PATTERN) + }) + + it('should use default name pattern when custom name pattern is null', () => { + const customValuePattern = 'valuePattern' + sensitiveHandler.setRedactionPatterns(null, customValuePattern) + expect(sensitiveHandler._namePattern.source).to.be.equals(DEFAULT_IAST_REDACTION_NAME_PATTERN) + expect(sensitiveHandler._valuePattern.source).to.be.equals(customValuePattern) + }) + + it('should use default value pattern when custom value pattern is null', () => { + const customNamePattern = 'namePattern' + sensitiveHandler.setRedactionPatterns(customNamePattern, null) + expect(sensitiveHandler._namePattern.source).to.be.equals(customNamePattern) + expect(sensitiveHandler._valuePattern.source).to.be.equals(DEFAULT_IAST_REDACTION_VALUE_PATTERN) + }) + }) + + describe('Not valid custom patterns', () => { + const iastLog = require('../../../../../src/appsec/iast/iast-log') + + beforeEach(() => { + sinon.stub(iastLog, 'warn') + }) + + afterEach(() => { + sinon.restore() + }) + + it('should use default patterns when not valid ones are set', () => { + sensitiveHandler.setRedactionPatterns('(unterminated_group', 'unmatched)') + expect(sensitiveHandler._namePattern.source).to.be.equals(DEFAULT_IAST_REDACTION_NAME_PATTERN) + expect(sensitiveHandler._valuePattern.source).to.be.equals(DEFAULT_IAST_REDACTION_VALUE_PATTERN) + + expect(iastLog.warn).to.have.been.calledTwice + expect(iastLog.warn.firstCall.args[0]).to.be.equals('Redaction name pattern is not valid') + expect(iastLog.warn.secondCall.args[0]).to.be.equals('Redaction value pattern is not valid') + }) + + it('should use default name pattern when custom name pattern is not valid', () => { + const customValuePattern = 
'valuePattern' + sensitiveHandler.setRedactionPatterns('(unterminated_group', customValuePattern) + expect(sensitiveHandler._namePattern.source).to.be.equals(DEFAULT_IAST_REDACTION_NAME_PATTERN) + expect(sensitiveHandler._valuePattern.source).to.be.equals(customValuePattern) + + expect(iastLog.warn).to.have.been.calledOnceWithExactly('Redaction name pattern is not valid') + }) + + it('should use default value pattern when custom value pattern is not valid', () => { + const customNamePattern = 'namePattern' + sensitiveHandler.setRedactionPatterns(customNamePattern, 'unmatched)') + expect(sensitiveHandler._namePattern.source).to.be.equals(customNamePattern) + expect(sensitiveHandler._valuePattern.source).to.be.equals(DEFAULT_IAST_REDACTION_VALUE_PATTERN) + + expect(iastLog.warn).to.have.been.calledOnceWithExactly('Redaction value pattern is not valid') + }) + }) + + it('Valid custom patterns', () => { + expect(sensitiveHandler.isSensibleName('sensibleName')).to.be.false + expect(sensitiveHandler.isSensibleValue('sensibleValue')).to.be.false + + sensitiveHandler.setRedactionPatterns('sensibleName', 'sensibleValue') + + expect(sensitiveHandler.isSensibleName('sensibleName')).to.be.true + expect(sensitiveHandler.isSensibleValue('sensibleValue')).to.be.true + }) + }) + describe('Sensible sources', () => { suite.filter(testCase => testCase.type === 'SOURCES').forEach((testCase) => { for (const name in testCase.parameters) { diff --git a/packages/dd-trace/test/appsec/iast/vulnerability-formatter/index.spec.js b/packages/dd-trace/test/appsec/iast/vulnerability-formatter/index.spec.js index ac83665e6a9..81fa0180a6d 100644 --- a/packages/dd-trace/test/appsec/iast/vulnerability-formatter/index.spec.js +++ b/packages/dd-trace/test/appsec/iast/vulnerability-formatter/index.spec.js @@ -1,6 +1,8 @@ 'use strict' const vulnerabilityFormatter = require('../../../../src/appsec/iast/vulnerabilities-formatter') +const sensitiveHandler = + require('../../../../src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler') const { suite } = require('./resources/evidence-redaction-suite.json') @@ -91,4 +93,21 @@ describe('Vulnerability formatter', () => { expect(json.vulnerabilities[0].location.column).to.be.undefined }) }) + + describe('Custom redaction patterns', () => { + beforeEach(() => { + sinon.stub(sensitiveHandler, 'setRedactionPatterns') + }) + + afterEach(() => { + sinon.restore() + }) + + it('should set custom redaction patterns', () => { + vulnerabilityFormatter.setRedactVulnerabilities(true, 'customNamePattern', 'customValuePattern') + + expect(sensitiveHandler.setRedactionPatterns) + .to.have.been.calledOnceWithExactly('customNamePattern', 'customValuePattern') + }) + }) }) diff --git a/packages/dd-trace/test/appsec/iast/vulnerability-reporter.spec.js b/packages/dd-trace/test/appsec/iast/vulnerability-reporter.spec.js index cdd1e37af31..bab1f9aca53 100644 --- a/packages/dd-trace/test/appsec/iast/vulnerability-reporter.spec.js +++ b/packages/dd-trace/test/appsec/iast/vulnerability-reporter.spec.js @@ -474,11 +474,13 @@ describe('vulnerability-reporter', () => { it('should set evidence redaction on vulnerability formatter', () => { const config = { iast: { - redactionEnabled: true + redactionEnabled: true, + redactionNamePattern: null, + redactionValuePattern: null } } start(config) - expect(vulnerabilityFormatter.setRedactVulnerabilities).to.have.been.calledOnceWithExactly(true) + expect(vulnerabilityFormatter.setRedactVulnerabilities).to.have.been.calledOnceWithExactly(true, null, 
null) }) }) }) diff --git a/packages/dd-trace/test/config.spec.js b/packages/dd-trace/test/config.spec.js index df5759b87ee..a53794ade36 100644 --- a/packages/dd-trace/test/config.spec.js +++ b/packages/dd-trace/test/config.spec.js @@ -115,6 +115,8 @@ describe('Config', () => { expect(config).to.have.nested.property('remoteConfig.pollInterval', 5) expect(config).to.have.nested.property('iast.enabled', false) expect(config).to.have.nested.property('iast.redactionEnabled', true) + expect(config).to.have.nested.property('iast.redactionNamePattern', null) + expect(config).to.have.nested.property('iast.redactionValuePattern', null) expect(config).to.have.nested.property('iast.telemetryVerbosity', 'INFORMATION') }) @@ -215,6 +217,8 @@ describe('Config', () => { process.env.DD_IAST_MAX_CONTEXT_OPERATIONS = '4' process.env.DD_IAST_DEDUPLICATION_ENABLED = false process.env.DD_IAST_REDACTION_ENABLED = false + process.env.DD_IAST_REDACTION_NAME_PATTERN = 'REDACTION_NAME_PATTERN' + process.env.DD_IAST_REDACTION_VALUE_PATTERN = 'REDACTION_VALUE_PATTERN' process.env.DD_IAST_TELEMETRY_VERBOSITY = 'DEBUG' process.env.DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED = 'true' process.env.DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED = 'true' @@ -291,6 +295,8 @@ describe('Config', () => { expect(config).to.have.nested.property('iast.maxContextOperations', 4) expect(config).to.have.nested.property('iast.deduplicationEnabled', false) expect(config).to.have.nested.property('iast.redactionEnabled', false) + expect(config).to.have.nested.property('iast.redactionNamePattern', 'REDACTION_NAME_PATTERN') + expect(config).to.have.nested.property('iast.redactionValuePattern', 'REDACTION_VALUE_PATTERN') expect(config).to.have.nested.property('iast.telemetryVerbosity', 'DEBUG') }) @@ -411,6 +417,8 @@ describe('Config', () => { maxContextOperations: 5, deduplicationEnabled: false, redactionEnabled: false, + redactionNamePattern: 'REDACTION_NAME_PATTERN', + redactionValuePattern: 'REDACTION_VALUE_PATTERN', telemetryVerbosity: 'DEBUG' } }, @@ -467,6 +475,8 @@ describe('Config', () => { expect(config).to.have.nested.property('iast.maxContextOperations', 5) expect(config).to.have.nested.property('iast.deduplicationEnabled', false) expect(config).to.have.nested.property('iast.redactionEnabled', false) + expect(config).to.have.nested.property('iast.redactionNamePattern', 'REDACTION_NAME_PATTERN') + expect(config).to.have.nested.property('iast.redactionValuePattern', 'REDACTION_VALUE_PATTERN') expect(config).to.have.nested.property('iast.telemetryVerbosity', 'DEBUG') expect(config).to.have.deep.nested.property('sampler', { sampleRate: 0.5, @@ -648,6 +658,8 @@ describe('Config', () => { process.env.DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING = 'disabled' process.env.DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS = 11 process.env.DD_IAST_ENABLED = 'false' + process.env.DD_IAST_REDACTION_NAME_PATTERN = 'name_pattern_to_be_overriden_by_options' + process.env.DD_IAST_REDACTION_VALUE_PATTERN = 'value_pattern_to_be_overriden_by_options' process.env.DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED = 'true' process.env.DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED = 'true' @@ -691,7 +703,9 @@ describe('Config', () => { exporter: 'agent', enableGetRumData: false, iast: { - enabled: true + enabled: true, + redactionNamePattern: 'REDACTION_NAME_PATTERN', + redactionValuePattern: 'REDACTION_VALUE_PATTERN' } }, appsec: { @@ -761,6 +775,8 @@ describe('Config', () => { expect(config).to.have.nested.property('iast.maxContextOperations', 2) 
expect(config).to.have.nested.property('iast.deduplicationEnabled', true) expect(config).to.have.nested.property('iast.redactionEnabled', true) + expect(config).to.have.nested.property('iast.redactionNamePattern', 'REDACTION_NAME_PATTERN') + expect(config).to.have.nested.property('iast.redactionValuePattern', 'REDACTION_VALUE_PATTERN') }) it('should give priority to non-experimental options', () => { From 1ccefc21bb20c4c155910a8855496253a3899ad6 Mon Sep 17 00:00:00 2001 From: Igor Unanua Date: Tue, 31 Oct 2023 10:25:38 +0100 Subject: [PATCH 040/147] Generic telemetry logs (#3647) --- .gitignore | 1 + packages/dd-trace/src/appsec/iast/iast-log.js | 13 +- .../src/appsec/iast/telemetry/index.js | 5 - .../src/appsec/iast/telemetry/log/index.js | 87 -------- packages/dd-trace/src/config.js | 2 +- packages/dd-trace/src/telemetry/index.js | 4 + packages/dd-trace/src/telemetry/logs/index.js | 65 ++++++ .../log => telemetry/logs}/log-collector.js | 31 +-- .../test/appsec/iast/iast-log.spec.js | 95 +++------ .../test/appsec/iast/telemetry/index.spec.js | 16 -- .../appsec/iast/telemetry/log/index.spec.js | 200 ------------------ packages/dd-trace/test/config.spec.js | 8 +- .../test/telemetry/logs/index.spec.js | 182 ++++++++++++++++ .../logs}/log-collector.spec.js | 15 +- 14 files changed, 317 insertions(+), 407 deletions(-) delete mode 100644 packages/dd-trace/src/appsec/iast/telemetry/log/index.js create mode 100644 packages/dd-trace/src/telemetry/logs/index.js rename packages/dd-trace/src/{appsec/iast/telemetry/log => telemetry/logs}/log-collector.js (70%) delete mode 100644 packages/dd-trace/test/appsec/iast/telemetry/log/index.spec.js create mode 100644 packages/dd-trace/test/telemetry/logs/index.spec.js rename packages/dd-trace/test/{appsec/iast/telemetry/log => telemetry/logs}/log-collector.spec.js (84%) diff --git a/.gitignore b/.gitignore index 6835a6dd270..ff2cfaa8e23 100644 --- a/.gitignore +++ b/.gitignore @@ -124,3 +124,4 @@ packages/datadog-plugin-next/test/yarn.lock packages/dd-trace/test/appsec/next/*/package.json packages/dd-trace/test/appsec/next/*/node_modules packages/dd-trace/test/appsec/next/*/yarn.lock +!packages/dd-trace/**/telemetry/logs diff --git a/packages/dd-trace/src/appsec/iast/iast-log.js b/packages/dd-trace/src/appsec/iast/iast-log.js index 6d6e2171de4..36b10f6fce5 100644 --- a/packages/dd-trace/src/appsec/iast/iast-log.js +++ b/packages/dd-trace/src/appsec/iast/iast-log.js @@ -1,9 +1,11 @@ 'use strict' +const dc = require('../../../../diagnostics_channel') const log = require('../../log') -const telemetryLogs = require('./telemetry/log') const { calculateDDBasePath } = require('../../util') +const telemetryLog = dc.channel('datadog:telemetry:log') + const ddBasePath = calculateDDBasePath(__dirname) const EOL = '\n' const STACK_FRAME_LINE_REGEX = /^\s*at\s/gm @@ -80,9 +82,8 @@ const iastLog = { }, publish (data, level) { - if (telemetryLogs.isLevelEnabled(level)) { - const telemetryLog = getTelemetryLog(data, level) - telemetryLogs.publish(telemetryLog) + if (telemetryLog.hasSubscribers) { + telemetryLog.publish(getTelemetryLog(data, level)) } return this }, @@ -92,6 +93,10 @@ const iastLog = { return this.publish(data, 'DEBUG') }, + /** + * forward 'INFO' log level to 'DEBUG' telemetry log level + * see also {@link ../../telemetry/logs#isLevelEnabled } method + */ infoAndPublish (data) { this.info(data) return this.publish(data, 'DEBUG') diff --git a/packages/dd-trace/src/appsec/iast/telemetry/index.js b/packages/dd-trace/src/appsec/iast/telemetry/index.js index 
1c5da375329..2b28ddf96fe 100644 --- a/packages/dd-trace/src/appsec/iast/telemetry/index.js +++ b/packages/dd-trace/src/appsec/iast/telemetry/index.js @@ -1,7 +1,6 @@ 'use strict' const telemetryMetrics = require('../../../telemetry/metrics') -const telemetryLogs = require('./log') const { Verbosity, getVerbosity } = require('./verbosity') const { initRequestNamespace, finalizeRequestNamespace, globalNamespace } = require('./namespaces') @@ -15,15 +14,11 @@ class Telemetry { if (this.enabled) { telemetryMetrics.manager.set('iast', globalNamespace) } - - telemetryLogs.start() } stop () { this.enabled = false telemetryMetrics.manager.delete('iast') - - telemetryLogs.stop() } isEnabled () { diff --git a/packages/dd-trace/src/appsec/iast/telemetry/log/index.js b/packages/dd-trace/src/appsec/iast/telemetry/log/index.js deleted file mode 100644 index 55cc0e75c5f..00000000000 --- a/packages/dd-trace/src/appsec/iast/telemetry/log/index.js +++ /dev/null @@ -1,87 +0,0 @@ -'use strict' - -const dc = require('../../../../../../diagnostics_channel') -const logCollector = require('./log-collector') -const { sendData } = require('../../../../telemetry/send-data') -const log = require('../../../../log') - -const telemetryStartChannel = dc.channel('datadog:telemetry:start') -const telemetryStopChannel = dc.channel('datadog:telemetry:stop') - -let config, application, host, interval - -function publish (log) { - if (log && isLevelEnabled(log.level)) { - logCollector.add(log) - } -} - -function sendLogs () { - try { - const logs = logCollector.drain() - if (logs) { - sendData(config, application, host, 'logs', logs) - } - } catch (e) { - log.error(e) - } -} - -function isLevelEnabled (level) { - return isLogCollectionEnabled(config) && level !== 'DEBUG' -} - -function isLogCollectionEnabled (config) { - return config && config.telemetry && config.telemetry.logCollection -} - -function onTelemetryStart (msg) { - if (!msg || !isLogCollectionEnabled(msg.config)) { - log.info('IAST telemetry logs start event received but log collection is not enabled or configuration is incorrect') - return false - } - - log.info('IAST telemetry logs starting') - - config = msg.config - application = msg.application - host = msg.host - - if (msg.heartbeatInterval) { - interval = setInterval(sendLogs, msg.heartbeatInterval) - interval.unref() - } - - return true -} - -function onTelemetryStop () { - stop() -} - -function start () { - telemetryStartChannel.subscribe(onTelemetryStart) - telemetryStopChannel.subscribe(onTelemetryStop) -} - -function stop () { - if (!isLogCollectionEnabled(config)) return - - log.info('IAST telemetry logs stopping') - - config = null - application = null - host = null - - if (telemetryStartChannel.hasSubscribers) { - telemetryStartChannel.unsubscribe(onTelemetryStart) - } - - if (telemetryStopChannel.hasSubscribers) { - telemetryStopChannel.unsubscribe(onTelemetryStop) - } - - clearInterval(interval) -} - -module.exports = { start, stop, publish, isLevelEnabled } diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index fc416f2bd8b..65d3df16e16 100644 --- a/packages/dd-trace/src/config.js +++ b/packages/dd-trace/src/config.js @@ -600,8 +600,8 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) 
this.telemetry = { enabled: DD_TRACE_EXPORTER !== 'datadog' && isTrue(DD_TRACE_TELEMETRY_ENABLED), heartbeatInterval: DD_TELEMETRY_HEARTBEAT_INTERVAL, - logCollection: isTrue(DD_TELEMETRY_LOG_COLLECTION_ENABLED), debug: isTrue(DD_TELEMETRY_DEBUG), + logCollection: isTrue(DD_TELEMETRY_LOG_COLLECTION_ENABLED), metrics: isTrue(DD_TELEMETRY_METRICS_ENABLED) } this.protocolVersion = DD_TRACE_AGENT_PROTOCOL_VERSION diff --git a/packages/dd-trace/src/telemetry/index.js b/packages/dd-trace/src/telemetry/index.js index 280a28ccca7..b3de33bbc18 100644 --- a/packages/dd-trace/src/telemetry/index.js +++ b/packages/dd-trace/src/telemetry/index.js @@ -7,6 +7,7 @@ const dependencies = require('./dependencies') const { sendData } = require('./send-data') const { manager: metricsManager } = require('./metrics') +const logs = require('./logs') const telemetryStartChannel = dc.channel('datadog:telemetry:start') const telemetryStopChannel = dc.channel('datadog:telemetry:stop') @@ -139,10 +140,13 @@ function start (aConfig, thePluginManager) { heartbeatInterval = config.telemetry.heartbeatInterval dependencies.start(config, application, host) + logs.start(config) + sendData(config, application, host, 'app-started', appStarted()) heartbeat(config, application, host) interval = setInterval(() => { metricsManager.send(config, application, host) + logs.send(config, application, host) }, heartbeatInterval) interval.unref() process.on('beforeExit', onBeforeExit) diff --git a/packages/dd-trace/src/telemetry/logs/index.js b/packages/dd-trace/src/telemetry/logs/index.js new file mode 100644 index 00000000000..4bc7a6b4a24 --- /dev/null +++ b/packages/dd-trace/src/telemetry/logs/index.js @@ -0,0 +1,65 @@ +'use strict' + +const dc = require('../../../../diagnostics_channel') +const logCollector = require('./log-collector') +const { sendData } = require('../send-data') + +const telemetryLog = dc.channel('datadog:telemetry:log') + +let enabled = false + +/** + * Telemetry logs api defines only ERROR, WARN and DEBUG levels: + * - WARN level is enabled by default + * - DEBUG level will be possible to activate with an env var or telemetry config property + */ +function isLevelEnabled (level) { + return isValidLevel(level) +} + +function isValidLevel (level) { + switch (level) { + case 'ERROR': + case 'WARN': + return true + default: + return false + } +} + +function onLog (log) { + if (isLevelEnabled(log?.level?.toUpperCase())) { + logCollector.add(log) + } +} + +function start (config) { + if (!config.telemetry.logCollection || enabled) return + + enabled = true + + telemetryLog.subscribe(onLog) +} + +function stop () { + enabled = false + + if (telemetryLog.hasSubscribers) { + telemetryLog.unsubscribe(onLog) + } +} + +function send (config, application, host) { + if (!enabled) return + + const logs = logCollector.drain() + if (logs) { + sendData(config, application, host, 'logs', logs) + } +} + +module.exports = { + start, + stop, + send +} diff --git a/packages/dd-trace/src/appsec/iast/telemetry/log/log-collector.js b/packages/dd-trace/src/telemetry/logs/log-collector.js similarity index 70% rename from packages/dd-trace/src/appsec/iast/telemetry/log/log-collector.js rename to packages/dd-trace/src/telemetry/logs/log-collector.js index 8e66b98ccae..740f5453797 100644 --- a/packages/dd-trace/src/appsec/iast/telemetry/log/log-collector.js +++ b/packages/dd-trace/src/telemetry/logs/log-collector.js @@ -1,6 +1,6 @@ 'use strict' -const log = require('../../../../log') +const log = require('../../log') const logs = new Map() @@ 
-18,37 +18,21 @@ function hashCode (hashSource) { } function createHash (logEntry) { - if (!logEntry) return 0 - const prime = 31 let result = ((!logEntry.level) ? 0 : hashCode(logEntry.level)) result = (((prime * result) | 0) + ((!logEntry.message) ? 0 : hashCode(logEntry.message))) | 0 - - // NOTE: tags are not used at the moment - // result = (((prime * result) | 0) + ((!logEntry.tags) ? 0 : hashCode(logEntry.tags))) | 0 result = (((prime * result) | 0) + ((!logEntry.stack_trace) ? 0 : hashCode(logEntry.stack_trace))) | 0 return result } -function newLogEntry (message, level, tags) { - return { - message, - level, - tags - } -} - function isValid (logEntry) { - return logEntry && logEntry.level && logEntry.message + return logEntry?.level && logEntry.message } const logCollector = { add (logEntry) { try { - if (!isValid(logEntry)) { - log.info('IAST log collector discarding invalid log') - return - } + if (!isValid(logEntry)) return false // NOTE: should errors have higher priority? and discard log entries with lower priority? if (logs.size >= maxEntries) { @@ -70,11 +54,13 @@ const logCollector = { drain () { if (logs.size === 0) return - const drained = [] - drained.push(...logs.values()) + const drained = [...logs.values()] if (overflowedCount > 0) { - drained.push(newLogEntry(`Omitted ${overflowedCount} entries due to overflowing`, 'ERROR')) + drained.push({ + message: `Omitted ${overflowedCount} entries due to overflowing`, + level: 'ERROR' + }) } this.reset() @@ -85,6 +71,7 @@ const logCollector = { reset (max) { logs.clear() overflowedCount = 0 + if (max) { maxEntries = max } diff --git a/packages/dd-trace/test/appsec/iast/iast-log.spec.js b/packages/dd-trace/test/appsec/iast/iast-log.spec.js index b3925c8f1e7..c6dba11a461 100644 --- a/packages/dd-trace/test/appsec/iast/iast-log.spec.js +++ b/packages/dd-trace/test/appsec/iast/iast-log.spec.js @@ -6,65 +6,48 @@ const ddBasePath = calculateDDBasePath(__dirname) const EOL = '\n' describe('IAST log', () => { - const telemetryDefaultConfig = { - config: { - telemetry: { - logCollection: true - } - } - } - let iastLog - let telemetryStartChannel - let telemetryLogs - let onTelemetryStart + let telemetryLog let log beforeEach(() => { - let subs = 0 - telemetryStartChannel = { - get hasSubscribers () { - return subs > 0 - }, - subscribe: (onTelemetryStartHandler) => { - onTelemetryStart = onTelemetryStartHandler - subs++ - }, - unsubscribe: () => { - subs-- - }, - publish: sinon.stub() - } - - const telemetryStopChannel = { - subscribe: () => {}, - unsubscribe: () => {} - } - log = { debug: sinon.stub(), info: sinon.stub(), warn: sinon.stub(), error: sinon.stub() } - telemetryLogs = proxyquire('../../../src/appsec/iast/telemetry/log', { - '../../../../../../diagnostics_channel': { - channel: (name) => name === 'datadog:telemetry:start' ? 
telemetryStartChannel : telemetryStopChannel - } - }) - sinon.stub(telemetryLogs, 'publish') - telemetryLogs.start() + telemetryLog = { + hasSubscribers: true, + publish: sinon.stub() + } iastLog = proxyquire('../../../src/appsec/iast/iast-log', { - './telemetry/log': telemetryLogs, + '../../../../diagnostics_channel': { + channel: () => telemetryLog + }, '../../log': log }) }) afterEach(() => { sinon.reset() - telemetryLogs.stop() + }) + + describe('debug', () => { + it('should call log.debug', () => { + iastLog.debug('debug') + + expect(log.debug).to.be.calledOnceWith('debug') + }) + + it('should call log.debug and publish msg via telemetry', () => { + iastLog.debugAndPublish('debug') + + expect(log.debug).to.be.calledOnceWith('debug') + expect(telemetryLog.publish).to.be.calledOnceWith({ message: 'debug', level: 'DEBUG' }) + }) }) describe('warn', () => { @@ -75,24 +58,20 @@ describe('IAST log', () => { }) it('should call log.warn and publish msg via telemetry', () => { - onTelemetryStart(telemetryDefaultConfig) - iastLog.warnAndPublish('warn') expect(log.warn).to.be.calledOnceWith('warn') - expect(telemetryLogs.publish).to.be.calledOnceWith({ message: 'warn', level: 'WARN' }) + expect(telemetryLog.publish).to.be.calledOnceWith({ message: 'warn', level: 'WARN' }) }) it('should chain multiple warn calls', () => { - onTelemetryStart(telemetryDefaultConfig) - iastLog.warn('warn').warnAndPublish('warnAndPublish').warn('warn2') expect(log.warn).to.be.calledThrice expect(log.warn.getCall(0).args[0]).to.be.eq('warn') expect(log.warn.getCall(1).args[0]).to.be.eq('warnAndPublish') expect(log.warn.getCall(2).args[0]).to.be.eq('warn2') - expect(telemetryLogs.publish).to.be.calledOnceWith({ message: 'warnAndPublish', level: 'WARN' }) + expect(telemetryLog.publish).to.be.calledOnceWith({ message: 'warnAndPublish', level: 'WARN' }) }) }) @@ -104,30 +83,24 @@ describe('IAST log', () => { }) it('should call log.error and publish msg via telemetry', () => { - onTelemetryStart(telemetryDefaultConfig) - iastLog.errorAndPublish('error') expect(log.error).to.be.calledOnceWith('error') - expect(telemetryLogs.publish).to.be.calledOnceWith({ message: 'error', level: 'ERROR' }) + expect(telemetryLog.publish).to.be.calledOnceWith({ message: 'error', level: 'ERROR' }) }) it('should chain multiple error calls', () => { - onTelemetryStart(telemetryDefaultConfig) - iastLog.error('error').errorAndPublish('errorAndPublish').error('error2') expect(log.error).to.be.calledThrice expect(log.error.getCall(0).args[0]).to.be.eq('error') expect(log.error.getCall(1).args[0]).to.be.eq('errorAndPublish') expect(log.error.getCall(2).args[0]).to.be.eq('error2') - expect(telemetryLogs.publish).to.be.calledOnceWith({ message: 'errorAndPublish', level: 'ERROR' }) + expect(telemetryLog.publish).to.be.calledOnceWith({ message: 'errorAndPublish', level: 'ERROR' }) }) it('should include original message and dd frames', () => { - onTelemetryStart(telemetryDefaultConfig) - - const ddFrame = `at T (${ddBasePath}packages/dd-trace/test/appsec/iast/telemetry/log_collector.spec.js:29:21)` + const ddFrame = `at T (${ddBasePath}packages/dd-trace/test/telemetry/logs/log_collector.spec.js:29:21)` const stack = new Error('Error 1') .stack.replace(`Error 1${EOL}`, `Error 1${EOL}${ddFrame}${EOL}`) @@ -138,8 +111,8 @@ describe('IAST log', () => { iastLog.errorAndPublish({ message: 'Error 1', stack }) - expect(telemetryLogs.publish).to.be.calledOnce - const log = telemetryLogs.publish.getCall(0).args[0] + expect(telemetryLog.publish).to.be.calledOnce + 
const log = telemetryLog.publish.getCall(0).args[0] expect(log.message).to.be.eq('Error 1') expect(log.level).to.be.eq('ERROR') @@ -152,10 +125,8 @@ describe('IAST log', () => { }) it('should not include original message if first frame is not a dd frame', () => { - onTelemetryStart(telemetryDefaultConfig) - const thirdPartyFrame = `at callFn (/this/is/not/a/dd/frame/runnable.js:366:21) - at T (${ddBasePath}packages/dd-trace/test/appsec/iast/telemetry/log_collector.spec.js:29:21)` + at T (${ddBasePath}packages/dd-trace/test/telemetry/logs/log_collector.spec.js:29:21)` const stack = new Error('Error 1') .stack.replace(`Error 1${EOL}`, `Error 1${EOL}${thirdPartyFrame}${EOL}`) @@ -166,9 +137,9 @@ describe('IAST log', () => { iastLog.errorAndPublish({ message: 'Error 1', stack }) - expect(telemetryLogs.publish).to.be.calledOnce + expect(telemetryLog.publish).to.be.calledOnce - const log = telemetryLogs.publish.getCall(0).args[0] + const log = telemetryLog.publish.getCall(0).args[0] expect(log.message).to.be.eq('omitted') expect(log.level).to.be.eq('ERROR') diff --git a/packages/dd-trace/test/appsec/iast/telemetry/index.spec.js b/packages/dd-trace/test/appsec/iast/telemetry/index.spec.js index 78a835b7cde..3914fc463ab 100644 --- a/packages/dd-trace/test/appsec/iast/telemetry/index.spec.js +++ b/packages/dd-trace/test/appsec/iast/telemetry/index.spec.js @@ -15,7 +15,6 @@ describe('Telemetry', () => { let defaultConfig let telemetryMetrics let iastTelemetry - let telemetryLogs let initRequestNamespace let finalizeRequestNamespace @@ -27,12 +26,6 @@ describe('Telemetry', () => { } } - telemetryLogs = { - registerProvider: () => telemetryLogs, - start: sinon.spy(), - stop: sinon.spy() - } - telemetryMetrics = { manager: { set: sinon.spy(), @@ -44,7 +37,6 @@ describe('Telemetry', () => { finalizeRequestNamespace = sinon.spy() iastTelemetry = proxyquire('../../../../src/appsec/iast/telemetry', { - './log': telemetryLogs, '../../../telemetry/metrics': telemetryMetrics, './namespaces': { initRequestNamespace, @@ -63,12 +55,10 @@ describe('Telemetry', () => { expect(iastTelemetry.enabled).to.be.true expect(iastTelemetry.verbosity).to.be.equal(Verbosity.INFORMATION) - expect(telemetryLogs.start).to.be.calledOnce }) it('should not enable telemetry if verbosity is OFF', () => { const iastTelemetry = proxyquire('../../../../src/appsec/iast/telemetry', { - './log': telemetryLogs, '../../../telemetry/metrics': telemetryMetrics }) @@ -80,7 +70,6 @@ describe('Telemetry', () => { expect(iastTelemetry.enabled).to.be.false expect(iastTelemetry.verbosity).to.be.equal(Verbosity.OFF) expect(telemetryMetrics.manager.set).to.not.be.called - expect(telemetryLogs.start).to.be.calledOnce }) it('should enable telemetry if telemetry.metrics is true', () => { @@ -92,7 +81,6 @@ describe('Telemetry', () => { expect(iastTelemetry.enabled).to.be.true expect(iastTelemetry.verbosity).to.be.equal(Verbosity.INFORMATION) expect(telemetryMetrics.manager.set).to.be.calledOnce - expect(telemetryLogs.start).to.be.calledOnce }) it('should not enable telemetry if telemetry.metrics is false', () => { @@ -104,7 +92,6 @@ describe('Telemetry', () => { expect(iastTelemetry.enabled).to.be.false expect(iastTelemetry.verbosity).to.be.equal(Verbosity.OFF) expect(telemetryMetrics.manager.set).to.not.be.called - expect(telemetryLogs.start).to.be.calledOnce }) }) @@ -115,7 +102,6 @@ describe('Telemetry', () => { iastTelemetry.stop() expect(iastTelemetry.enabled).to.be.false expect(telemetryMetrics.manager.delete).to.be.calledOnce - 
expect(telemetryLogs.stop).to.be.calledOnce }) }) @@ -132,7 +118,6 @@ describe('Telemetry', () => { it('should not call init if enabled and verbosity is Off', () => { const iastTelemetry = proxyquire('../../../../src/appsec/iast/telemetry', { '../../../telemetry/metrics': telemetryMetrics, - './log': telemetryLogs, './verbosity': { getVerbosity: () => Verbosity.OFF } @@ -161,7 +146,6 @@ describe('Telemetry', () => { it('should not call finalizeRequestNamespace if enabled and verbosity is Off', () => { const iastTelemetry = proxyquire('../../../../src/appsec/iast/telemetry', { '../../../telemetry/metrics': telemetryMetrics, - './log': telemetryLogs, './verbosity': { getVerbosity: () => Verbosity.OFF } diff --git a/packages/dd-trace/test/appsec/iast/telemetry/log/index.spec.js b/packages/dd-trace/test/appsec/iast/telemetry/log/index.spec.js deleted file mode 100644 index 6d3dc17b01c..00000000000 --- a/packages/dd-trace/test/appsec/iast/telemetry/log/index.spec.js +++ /dev/null @@ -1,200 +0,0 @@ -const { expect } = require('chai') -const { match } = require('sinon') -const proxyquire = require('proxyquire') - -describe('telemetry logs', () => { - let defaultConfig - let onTelemetryStartMsg - let telemetryStartChannel - let telemetryStopChannel - let onTelemetryStart - let onTelemetryStop - let dc - - beforeEach(() => { - defaultConfig = { - telemetry: { - enabled: true, - logCollection: true, - debug: false - } - } - - onTelemetryStartMsg = { config: defaultConfig, application: {}, host: {}, heartbeatInterval: 60000 } - - telemetryStartChannel = { - get hasSubscribers () { - return this.subscribe.callCount > 0 - }, - subscribe: sinon.stub(), - unsubscribe: sinon.stub() - } - - telemetryStopChannel = { - get hasSubscribers () { - return this.subscribe.callCount > 0 - }, - subscribe: sinon.stub(), - unsubscribe: sinon.stub() - } - - dc = { - channel: (name) => name === 'datadog:telemetry:start' ? 
telemetryStartChannel : telemetryStopChannel - } - - onTelemetryStart = () => telemetryStartChannel.subscribe.getCall(0).args[0] - onTelemetryStop = () => telemetryStopChannel.subscribe.getCall(0).args[0] - }) - - describe('start', () => { - it('should be enabled by default and subscribe', () => { - const logs = proxyquire('../../../../../src/appsec/iast/telemetry/log', { - '../../../../../../diagnostics_channel': dc - }) - logs.start() - defaultConfig.telemetry.logCollection = true - - expect(onTelemetryStart()({ config: defaultConfig })).to.be.true - expect(telemetryStartChannel.subscribe).to.have.been.calledOnce - expect(telemetryStopChannel.subscribe).to.have.been.calledOnce - }) - - it('should be disabled and not subscribe if DD_TELEMETRY_LOG_COLLECTION_ENABLED = false', () => { - const logs = proxyquire('../../../../../src/appsec/iast/telemetry/log', { - '../../../../../../diagnostics_channel': dc - }) - logs.start() - - defaultConfig.telemetry.logCollection = false - - expect(onTelemetryStart()({ config: defaultConfig })).to.be.false - }) - - it('should call sendData periodically', () => { - const clock = sinon.useFakeTimers() - const sendData = sinon.stub() - - let logCollectorCalled = 0 - const logs = proxyquire('../../../../../src/appsec/iast/telemetry/log', { - '../../../../../../diagnostics_channel': dc, - '../../../../telemetry/send-data': { sendData }, - './log-collector': { - drain: () => { - logCollectorCalled++ - return { message: 'Error 1', level: 'ERROR' } - } - } - }) - logs.start() - onTelemetryStart()(onTelemetryStartMsg) - - clock.tick(60000) - clock.tick(60000) - - expect(logCollectorCalled).to.be.eq(2) - expect(sendData).to.have.been.calledTwice - expect(sendData).to.have.been.calledWith(onTelemetryStartMsg.config, - onTelemetryStartMsg.application, - onTelemetryStartMsg.host, - 'logs' - ) - clock.restore() - }) - }) - - describe('stop', () => { - it('should unsubscribe configured listeners', () => { - const logs = proxyquire('../../../../../src/appsec/iast/telemetry/log', { - '../../../../../../diagnostics_channel': dc - }) - logs.start() - onTelemetryStart()(onTelemetryStartMsg) - - logs.stop() - - expect(telemetryStartChannel.unsubscribe).to.have.been.calledOnce - expect(telemetryStopChannel.unsubscribe).to.have.been.calledOnce - }) - - it('should unsubscribe configured listeners when datadog:telemetry:stop is received', () => { - const logs = proxyquire('../../../../../src/appsec/iast/telemetry/log', { - '../../../../../../diagnostics_channel': dc - }) - logs.start() - onTelemetryStart()(onTelemetryStartMsg) - - onTelemetryStop()() - - expect(telemetryStartChannel.unsubscribe).to.have.been.calledOnce - expect(telemetryStopChannel.unsubscribe).to.have.been.calledOnce - }) - }) - - describe('sendData', () => { - it('should be not called with DEBUG level', () => { - const logCollectorAdd = sinon.stub() - const logs = proxyquire('../../../../../src/appsec/iast/telemetry/log', { - '../../../../../../diagnostics_channel': dc, - './log-collector': { - add: logCollectorAdd - } - }) - logs.start() - onTelemetryStart()(onTelemetryStartMsg) - - logs.publish({ message: 'message', level: 'DEBUG' }) - - expect(logCollectorAdd).to.not.be.called - }) - - it('should be called with WARN level', () => { - const logCollectorAdd = sinon.stub() - const logs = proxyquire('../../../../../src/appsec/iast/telemetry/log', { - '../../../../../../diagnostics_channel': dc, - './log-collector': { - add: logCollectorAdd - } - }) - logs.start() - onTelemetryStart()(onTelemetryStartMsg) - - 
logs.publish({ message: 'message', level: 'WARN' }) - - expect(logCollectorAdd).to.be.calledOnceWith(match({ message: 'message', level: 'WARN' })) - }) - - it('should be called with ERROR level', () => { - const logCollectorAdd = sinon.stub() - const logs = proxyquire('../../../../../src/appsec/iast/telemetry/log', { - '../../../../../../diagnostics_channel': dc, - './log-collector': { - add: logCollectorAdd - } - }) - logs.start() - onTelemetryStart()(onTelemetryStartMsg) - - logs.publish({ message: 'message', level: 'ERROR' }) - - expect(logCollectorAdd).to.be.calledOnceWith(match({ message: 'message', level: 'ERROR' })) - }) - - it('should be called with ERROR level and stack_trace', () => { - const logCollectorAdd = sinon.stub() - const logs = proxyquire('../../../../../src/appsec/iast/telemetry/log', { - '../../../../../../diagnostics_channel': dc, - './log-collector': { - add: logCollectorAdd - } - }) - logs.start() - onTelemetryStart()(onTelemetryStartMsg) - - const error = new Error('message') - const stack = error.stack - logs.publish({ message: error.message, stack_trace: stack, level: 'ERROR' }) - - expect(logCollectorAdd).to.be.calledOnceWith(match({ message: 'message', level: 'ERROR', stack_trace: stack })) - }) - }) -}) diff --git a/packages/dd-trace/test/config.spec.js b/packages/dd-trace/test/config.spec.js index a53794ade36..9694dbc50e8 100644 --- a/packages/dd-trace/test/config.spec.js +++ b/packages/dd-trace/test/config.spec.js @@ -987,18 +987,18 @@ describe('Config', () => { process.env.DD_TELEMETRY_METRICS_ENABLED = origTelemetryMetricsEnabledValue }) - it('should set DD_TELEMETRY_LOG_COLLECTION_ENABLED = false', () => { - const origLogCollectionValue = process.env.DD_TELEMETRY_LOG_COLLECTION_ENABLED + it('should not set DD_TELEMETRY_LOG_COLLECTION_ENABLED', () => { + const origLogsValue = process.env.DD_TELEMETRY_LOG_COLLECTION_ENABLED process.env.DD_TELEMETRY_LOG_COLLECTION_ENABLED = 'false' const config = new Config() expect(config.telemetry.logCollection).to.be.false - process.env.DD_TELEMETRY_LOG_COLLECTION_ENABLED = origLogCollectionValue + process.env.DD_TELEMETRY_LOG_COLLECTION_ENABLED = origLogsValue }) - it('should set DD_TELEMETRY_LOG_COLLECTION_ENABLED = true if DD_IAST_ENABLED', () => { + it('should set DD_TELEMETRY_LOG_COLLECTION_ENABLED if DD_IAST_ENABLED', () => { const origIastEnabledValue = process.env.DD_IAST_ENABLED process.env.DD_IAST_ENABLED = 'true' diff --git a/packages/dd-trace/test/telemetry/logs/index.spec.js b/packages/dd-trace/test/telemetry/logs/index.spec.js new file mode 100644 index 00000000000..822543ba243 --- /dev/null +++ b/packages/dd-trace/test/telemetry/logs/index.spec.js @@ -0,0 +1,182 @@ +'use strict' + +require('../../setup/tap') + +const { match } = require('sinon') +const proxyquire = require('proxyquire') + +describe('telemetry logs', () => { + let defaultConfig + let telemetryLog + let dc + + beforeEach(() => { + defaultConfig = { + telemetry: { + enabled: true, + logCollection: true, + debug: false + } + } + + telemetryLog = { + get hasSubscribers () { + return this.subscribe.callCount > 0 + }, + subscribe: sinon.stub(), + unsubscribe: sinon.stub() + } + + dc = { + channel: () => telemetryLog + } + }) + + describe('start', () => { + it('should be enabled by default and subscribe', () => { + const logs = proxyquire('../../../src/telemetry/logs', { + '../../../../diagnostics_channel': dc + }) + + logs.start(defaultConfig) + + expect(telemetryLog.subscribe).to.have.been.calledOnce + }) + + it('should be subscribe only once', 
() => { + const logs = proxyquire('../../../src/telemetry/logs', { + '../../../../diagnostics_channel': dc + }) + + logs.start(defaultConfig) + logs.start(defaultConfig) + logs.start(defaultConfig) + + expect(telemetryLog.subscribe).to.have.been.calledOnce + }) + + it('should be disabled and not subscribe if DD_TELEMETRY_LOG_COLLECTION_ENABLED = false', () => { + const logs = proxyquire('../../../src/telemetry/logs', { + '../../../../diagnostics_channel': dc + }) + + defaultConfig.telemetry.logCollection = false + logs.start(defaultConfig) + + expect(telemetryLog.subscribe).to.not.been.called + }) + }) + + describe('stop', () => { + it('should unsubscribe configured listeners', () => { + const logs = proxyquire('../../../src/telemetry/logs', { + '../../../../diagnostics_channel': dc + }) + logs.start(defaultConfig) + + logs.stop() + + expect(telemetryLog.unsubscribe).to.have.been.calledOnce + }) + }) + + describe('logCollector add', () => { + const dc = require('../../../../diagnostics_channel') + let logCollectorAdd + let telemetryLog + + beforeEach(() => { + telemetryLog = dc.channel('datadog:telemetry:log') + + logCollectorAdd = sinon.stub() + const logs = proxyquire('../../../src/telemetry/logs', { + './log-collector': { + add: logCollectorAdd + } + }) + logs.start(defaultConfig) + }) + + it('should be not called with DEBUG level', () => { + telemetryLog.publish({ message: 'message', level: 'DEBUG' }) + + expect(logCollectorAdd).to.not.be.called + }) + + it('should be called with WARN level', () => { + telemetryLog.publish({ message: 'message', level: 'WARN' }) + + expect(logCollectorAdd).to.be.calledOnceWith(match({ message: 'message', level: 'WARN' })) + }) + + it('should be called with ERROR level', () => { + telemetryLog.publish({ message: 'message', level: 'ERROR' }) + + expect(logCollectorAdd).to.be.calledOnceWith(match({ message: 'message', level: 'ERROR' })) + }) + + it('should be called with ERROR level and stack_trace', () => { + const error = new Error('message') + const stack = error.stack + telemetryLog.publish({ message: error.message, stack_trace: stack, level: 'ERROR' }) + + expect(logCollectorAdd).to.be.calledOnceWith(match({ message: 'message', level: 'ERROR', stack_trace: stack })) + }) + + it('should not be called with no defined level', () => { + telemetryLog.publish({ message: 'message' }) + + expect(logCollectorAdd).to.not.be.called + }) + + it('should not be called with incorrect level', () => { + telemetryLog.publish({ message: 'message', level: 'INFO' }) + + expect(logCollectorAdd).to.not.be.called + }) + }) + + describe('send', () => { + let collectedLogs, application, host + let logs + let logCollectorDrain + let sendData + + beforeEach(() => { + collectedLogs = [{ message: 'message', level: 'ERROR' }] + application = {} + host = {} + + logCollectorDrain = sinon.stub().returns(collectedLogs) + sendData = sinon.stub() + + logs = proxyquire('../../../src/telemetry/logs', { + './log-collector': { + drain: logCollectorDrain + }, + '../send-data': { + sendData + } + }) + }) + + it('should drain logCollector and call sendData', () => { + logs.start(defaultConfig) + + logs.send(defaultConfig, application, host) + + expect(sendData).to.be.calledOnceWithExactly(defaultConfig, application, host, 'logs', collectedLogs) + }) + + it('should not drain logCollector and call sendData if not enabled', () => { + defaultConfig.telemetry.logCollection = false + + logs.start(defaultConfig) + + logs.send(defaultConfig, application, host) + + 
expect(logCollectorDrain).to.not.be.called + expect(sendData).to.not.be.called + }) + }) +}) diff --git a/packages/dd-trace/test/appsec/iast/telemetry/log/log-collector.spec.js b/packages/dd-trace/test/telemetry/logs/log-collector.spec.js similarity index 84% rename from packages/dd-trace/test/appsec/iast/telemetry/log/log-collector.spec.js rename to packages/dd-trace/test/telemetry/logs/log-collector.spec.js index dde5f6afb65..57f634db2ef 100644 --- a/packages/dd-trace/test/appsec/iast/telemetry/log/log-collector.spec.js +++ b/packages/dd-trace/test/telemetry/logs/log-collector.spec.js @@ -1,10 +1,13 @@ -const { expect } = require('chai') -const { calculateDDBasePath } = require('../../../../../src/util') +'use strict' + +require('../../setup/tap') + +const { calculateDDBasePath } = require('../../../src/util') const ddBasePath = calculateDDBasePath(__dirname) describe('telemetry log collector', () => { - const logCollector = require('../../../../../src/appsec/iast/telemetry/log/log-collector') + const logCollector = require('../../../src/telemetry/logs/log-collector') afterEach(() => { logCollector.reset(3) @@ -25,7 +28,7 @@ describe('telemetry log collector', () => { it('should store logs with same message but different stack', () => { const ddFrame = - `at T (${ddBasePath}packages/dd-trace/test/appsec/iast/telemetry/log/log-collector.spec.js:29:21)` + `at T (${ddBasePath}packages/dd-trace/test/telemetry/logs/log-collector.spec.js:29:21)` expect(logCollector.add({ message: 'Error 1', level: 'ERROR', stack_trace: `stack 1\n${ddFrame}` })).to.be.true expect(logCollector.add({ message: 'Error 1', level: 'ERROR', stack_trace: `stack 2\n${ddFrame}` })).to.be.true expect(logCollector.add({ message: 'Error 1', level: 'ERROR', stack_trace: `stack 3\n${ddFrame}` })).to.be.true @@ -33,7 +36,7 @@ describe('telemetry log collector', () => { it('should store logs with same message, same stack but different level', () => { const ddFrame = - `at T (${ddBasePath}packages/dd-trace/test/appsec/iast/telemetry/log/log-collector.spec.js:29:21)` + `at T (${ddBasePath}packages/dd-trace/test/telemetry/logs/log-collector.spec.js:29:21)` expect(logCollector.add({ message: 'Error 1', level: 'ERROR', stack_trace: `stack 1\n${ddFrame}` })).to.be.true expect(logCollector.add({ message: 'Error 1', level: 'WARN', stack_trace: `stack 1\n${ddFrame}` })).to.be.true expect(logCollector.add({ message: 'Error 1', level: 'DEBUG', stack_trace: `stack 1\n${ddFrame}` })).to.be.true @@ -58,7 +61,7 @@ describe('telemetry log collector', () => { const logs = logCollector.drain() expect(logs.length).to.be.equal(4) - expect(logs[3]).to.deep.eq({ message: 'Omitted 2 entries due to overflowing', level: 'ERROR', tags: undefined }) + expect(logs[3]).to.deep.eq({ message: 'Omitted 2 entries due to overflowing', level: 'ERROR' }) }) }) }) From 9a533e456369307dd847d9e241043eeedd73a053 Mon Sep 17 00:00:00 2001 From: Carles Capell <107924659+CarlesDD@users.noreply.github.com> Date: Tue, 31 Oct 2023 11:21:54 +0100 Subject: [PATCH 041/147] Add benchmark startup time for IAST (#3690) * Add startup time benchmark for iast * New CI image for startup benchmark * Tweak startup benchmark app * New CI image for startup benchmark * Disable rewriter * Enable rewriter * Restore CI base image * Fix linting * Remove sed command to avoid logging in insecure-bank * Move listening listener to server listen method for insecure-bank * Checkout specific commit for insecure-bank * Add appsec variants --- benchmark/sirun/Dockerfile | 11 +++++++++++ 
benchmark/sirun/appsec-iast/insecure-bank.js | 9 +++++++++ benchmark/sirun/appsec-iast/meta.json | 15 +++++++++++++++ benchmark/sirun/appsec/insecure-bank.js | 9 +++++++++ benchmark/sirun/appsec/meta.json | 15 +++++++++++++++ 5 files changed, 59 insertions(+) create mode 100644 benchmark/sirun/appsec-iast/insecure-bank.js create mode 100644 benchmark/sirun/appsec/insecure-bank.js diff --git a/benchmark/sirun/Dockerfile b/benchmark/sirun/Dockerfile index 743e69031d7..f212bfd662c 100644 --- a/benchmark/sirun/Dockerfile +++ b/benchmark/sirun/Dockerfile @@ -37,3 +37,14 @@ RUN mkdir -p /usr/local/nvm \ && nvm install --no-progress 20.4.0 \ && nvm alias default 18 \ && nvm use 18 + +RUN mkdir /opt/insecure-bank-js +RUN git clone --depth 1 https://github.com/hdiv/insecure-bank-js.git /opt/insecure-bank-js + +WORKDIR /opt/insecure-bank-js +RUN git checkout 2003d9085a6e9a679e31fd88719e4de030d6855f +RUN . $NVM_DIR/nvm.sh \ + && npm ci \ + && npm cache clean --force + +WORKDIR /app diff --git a/benchmark/sirun/appsec-iast/insecure-bank.js b/benchmark/sirun/appsec-iast/insecure-bank.js new file mode 100644 index 00000000000..c8930910396 --- /dev/null +++ b/benchmark/sirun/appsec-iast/insecure-bank.js @@ -0,0 +1,9 @@ +const http = require('http') +const app = require('/opt/insecure-bank-js/app') + +const { port } = require('./common') + +app.set('port', port) +const server = http.createServer(app) + +server.listen(port, () => { server.close() }) diff --git a/benchmark/sirun/appsec-iast/meta.json b/benchmark/sirun/appsec-iast/meta.json index 2908eb3cc62..2273ff73855 100644 --- a/benchmark/sirun/appsec-iast/meta.json +++ b/benchmark/sirun/appsec-iast/meta.json @@ -61,6 +61,21 @@ "DD_IAST_MAX_CONCURRENT_REQUESTS": "1000", "DD_IAST_MAX_CONTEXT_OPERATIONS": "100" } + }, + "startup-time-control": { + "run": "node --require ../../../init.js insecure-bank.js", + "run_with_affinity": "bash -c \"taskset -c $CPU_AFFINITY node --require ../../../init.js insecure-bank.js\"", + "env": { + "DD_IAST_ENABLED": "0" + } + }, + "startup-time-iast-enabled": { + "run": "node --require ../../../init.js insecure-bank.js", + "run_with_affinity": "bash -c \"taskset -c $CPU_AFFINITY node --require ../../../init.js insecure-bank.js\"", + "baseline": "startup-time-control", + "env": { + "DD_IAST_ENABLED": "1" + } } } } diff --git a/benchmark/sirun/appsec/insecure-bank.js b/benchmark/sirun/appsec/insecure-bank.js new file mode 100644 index 00000000000..c8930910396 --- /dev/null +++ b/benchmark/sirun/appsec/insecure-bank.js @@ -0,0 +1,9 @@ +const http = require('http') +const app = require('/opt/insecure-bank-js/app') + +const { port } = require('./common') + +app.set('port', port) +const server = http.createServer(app) + +server.listen(port, () => { server.close() }) diff --git a/benchmark/sirun/appsec/meta.json b/benchmark/sirun/appsec/meta.json index 9bb91ff278d..887d0509d95 100644 --- a/benchmark/sirun/appsec/meta.json +++ b/benchmark/sirun/appsec/meta.json @@ -43,6 +43,21 @@ "ATTACK_404": "1", "ATTACK_QS": "1" } + }, + "startup-time-control": { + "run": "node --require ../../../init.js insecure-bank.js", + "run_with_affinity": "bash -c \"taskset -c $CPU_AFFINITY node --require ../../../init.js insecure-bank.js\"", + "env": { + "DD_APPSEC_ENABLED": "0" + } + }, + "startup-time-appsec-enabled": { + "run": "node --require ../../../init.js insecure-bank.js", + "run_with_affinity": "bash -c \"taskset -c $CPU_AFFINITY node --require ../../../init.js insecure-bank.js\"", + "baseline": "startup-time-control", + "env": { + 
"DD_APPSEC_ENABLED": "1" + } } } } From 69bf3b9f28d97ebc0bd1495f769a1ddf814bfdab Mon Sep 17 00:00:00 2001 From: Ugaitz Urien Date: Tue, 31 Oct 2023 12:23:00 +0100 Subject: [PATCH 042/147] Check only query and body parameters in nosql injections (#3725) --- .../nosql-injection-mongodb-analyzer.js | 11 +++++++++-- ...alyzer.express-mongo-sanitize.plugin.spec.js | 17 +++++++++++++++++ 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/packages/dd-trace/src/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.js b/packages/dd-trace/src/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.js index 8855ed6ff9e..83758045ece 100644 --- a/packages/dd-trace/src/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.js +++ b/packages/dd-trace/src/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.js @@ -7,6 +7,7 @@ const { getNodeModulesPaths } = require('../path-line') const { getNextSecureMark } = require('../taint-tracking/secure-marks-generator') const { storage } = require('../../../../../datadog-core') const { getIastContext } = require('../iast-context') +const { HTTP_REQUEST_PARAMETER, HTTP_REQUEST_BODY } = require('../taint-tracking/source-types') const EXCLUDED_PATHS_FROM_STACK = getNodeModulesPaths('mongodb', 'mongoose') const MONGODB_NOSQL_SECURE_MARK = getNextSecureMark() @@ -113,6 +114,12 @@ class NosqlInjectionMongodbAnalyzer extends InjectionAnalyzer { }) } + _isVulnerableRange (range) { + const rangeType = range?.iinfo?.type + const isVulnerableType = rangeType === HTTP_REQUEST_PARAMETER || rangeType === HTTP_REQUEST_BODY + return isVulnerableType && (range.secureMarks & MONGODB_NOSQL_SECURE_MARK) !== MONGODB_NOSQL_SECURE_MARK + } + _isVulnerable (value, iastContext) { if (value?.filter && iastContext) { let isVulnerable = false @@ -124,13 +131,13 @@ class NosqlInjectionMongodbAnalyzer extends InjectionAnalyzer { const rangesByKey = {} const allRanges = [] - iterateObjectStrings(value.filter, function (val, nextLevelKeys) { + iterateObjectStrings(value.filter, (val, nextLevelKeys) => { const ranges = getRanges(iastContext, val) if (ranges?.length) { const filteredRanges = [] for (const range of ranges) { - if ((range.secureMarks & MONGODB_NOSQL_SECURE_MARK) !== MONGODB_NOSQL_SECURE_MARK) { + if (this._isVulnerableRange(range)) { isVulnerable = true filteredRanges.push(range) } diff --git a/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.express-mongo-sanitize.plugin.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.express-mongo-sanitize.plugin.spec.js index 7b70c6f8712..e05537ce04b 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.express-mongo-sanitize.plugin.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.express-mongo-sanitize.plugin.spec.js @@ -48,6 +48,14 @@ describe('nosql injection detection in mongodb - whole feature', () => { }) prepareTestServerForIastInExpress('Test without sanitization middlewares', expressVersion, + (expressApp) => { + expressApp.get('/path/:parameter', async function (req, res) { + await collection.find({ + key: req.params.parameter + }) + res.end() + }) + }, (testThatRequestHasVulnerability, testThatRequestHasNoVulnerability) => { testThatRequestHasVulnerability({ fn: async (req, res) => { @@ -73,6 +81,15 @@ describe('nosql injection detection in mongodb - whole feature', () => { } }) + testThatRequestHasNoVulnerability({ + testDescription: 'should not have NOSQL_MONGODB_INJECTION 
vulnerability with path params', + fn: function noop () {}, + vulnerability: 'NOSQL_MONGODB_INJECTION', + makeRequest: (done, config) => { + axios.get(`http://localhost:${config.port}/path/parameterValue`).catch(done) + } + }) + testThatRequestHasVulnerability({ testDescription: 'should have NOSQL_MONGODB_INJECTION vulnerability in correct file and line', fn: async (req, res) => { From 2e214af25797a8534e8ff160fea5076f100a35c1 Mon Sep 17 00:00:00 2001 From: Ugaitz Urien Date: Tue, 31 Oct 2023 14:12:35 +0100 Subject: [PATCH 043/147] Fix knex nested queries (#3730) * Fix knex nested queries * Fix lint --- packages/datadog-instrumentations/src/knex.js | 41 +++++++++++-------- ...sql-injection-analyzer.knex.plugin.spec.js | 2 +- 2 files changed, 25 insertions(+), 18 deletions(-) diff --git a/packages/datadog-instrumentations/src/knex.js b/packages/datadog-instrumentations/src/knex.js index 1df88ba31a7..6c684a3e0d5 100644 --- a/packages/datadog-instrumentations/src/knex.js +++ b/packages/datadog-instrumentations/src/knex.js @@ -1,6 +1,6 @@ 'use strict' -const { addHook, channel } = require('./helpers/instrument') +const { addHook, channel, AsyncResource } = require('./helpers/instrument') const { wrapThen } = require('./helpers/promise') const shimmer = require('../../datadog-shimmer') @@ -39,34 +39,41 @@ addHook({ return raw.apply(this, arguments) } + const asyncResource = new AsyncResource('bound-anonymous-fn') + function finish () { finishRawQueryCh.publish() } - startRawQueryCh.publish({ sql, dialect: this.dialect }) + return asyncResource.runInAsyncScope(() => { + startRawQueryCh.publish({ sql, dialect: this.dialect }) - const rawResult = raw.apply(this, arguments) + const rawResult = raw.apply(this, arguments) + shimmer.wrap(rawResult, 'then', originalThen => function () { + return asyncResource.runInAsyncScope(() => { + arguments[0] = wrapCallbackWithFinish(arguments[0], finish) + if (arguments[1]) arguments[1] = wrapCallbackWithFinish(arguments[1], finish) - shimmer.wrap(rawResult, 'then', originalThen => function () { - arguments[0] = wrapCallbackWithFinish(arguments[0], finish) - arguments[1] = wrapCallbackWithFinish(arguments[1], finish) + const originalThenResult = originalThen.apply(this, arguments) - const originalThenResult = originalThen.apply(this, arguments) + shimmer.wrap(originalThenResult, 'catch', originalCatch => function () { + arguments[0] = wrapCallbackWithFinish(arguments[0], finish) + return originalCatch.apply(this, arguments) + }) - shimmer.wrap(originalThenResult, 'catch', originalCatch => function () { - arguments[0] = wrapCallbackWithFinish(arguments[0], finish) - return originalCatch.apply(this, arguments) + return originalThenResult + }) }) - return originalThenResult - }) + shimmer.wrap(rawResult, 'asCallback', originalAsCallback => function () { + return asyncResource.runInAsyncScope(() => { + arguments[0] = wrapCallbackWithFinish(arguments[0], finish) + return originalAsCallback.apply(this, arguments) + }) + }) - shimmer.wrap(rawResult, 'asCallback', originalAsCallback => function () { - arguments[0] = wrapCallbackWithFinish(arguments[0], finish) - return originalAsCallback.apply(this, arguments) + return rawResult }) - - return rawResult }) return Knex }) diff --git a/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.knex.plugin.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.knex.plugin.spec.js index e867a03c0f5..a5dddc6b888 100644 --- 
a/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.knex.plugin.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.knex.plugin.spec.js @@ -10,7 +10,7 @@ const iastContextFunctions = require('../../../../src/appsec/iast/iast-context') const { newTaintedString } = require('../../../../src/appsec/iast/taint-tracking/operations') const vulnerabilityReporter = require('../../../../src/appsec/iast/vulnerability-reporter') -describe.skip('sql-injection-analyzer with knex', () => { +describe('sql-injection-analyzer with knex', () => { withVersions('knex', 'knex', knexVersion => { if (!semver.satisfies(knexVersion, '>=2')) return From 382c5c64f36eba834933d579819bc299f8859d51 Mon Sep 17 00:00:00 2001 From: Roch Devost Date: Tue, 31 Oct 2023 12:39:06 -0400 Subject: [PATCH 044/147] fix error in http plugin when it was enabled after request start (#3740) --- packages/datadog-plugin-http/src/client.js | 2 + .../datadog-plugin-http/test/client.spec.js | 44 +++++++++++++++++++ 2 files changed, 46 insertions(+) diff --git a/packages/datadog-plugin-http/src/client.js b/packages/datadog-plugin-http/src/client.js index 015c8d28eb9..8ea210a0ba9 100644 --- a/packages/datadog-plugin-http/src/client.js +++ b/packages/datadog-plugin-http/src/client.js @@ -76,6 +76,7 @@ class HttpClientPlugin extends ClientPlugin { } finish ({ req, res, span }) { + if (!span) return if (res) { const status = res.status || res.statusCode @@ -98,6 +99,7 @@ class HttpClientPlugin extends ClientPlugin { } error ({ span, error }) { + if (!span) return if (error) { span.addTags({ [ERROR_TYPE]: error.name, diff --git a/packages/datadog-plugin-http/test/client.spec.js b/packages/datadog-plugin-http/test/client.spec.js index d7869751314..73aba7d205e 100644 --- a/packages/datadog-plugin-http/test/client.spec.js +++ b/packages/datadog-plugin-http/test/client.spec.js @@ -964,6 +964,50 @@ describe('Plugin', () => { } }) + describe('with late plugin initialization and an external subscriber', () => { + let ch + let sub + + beforeEach(() => { + return agent.load('http', { server: false }) + .then(() => { + ch = require('../../diagnostics_channel').channel('apm:http:client:request:start') + sub = () => {} + tracer = require('../../dd-trace') + http = require(protocol) + }) + }) + + afterEach(() => { + ch.unsubscribe(sub) + }) + + it('should not crash', done => { + const app = (req, res) => { + res.end() + } + + getPort().then(port => { + appListener = server(app, port, () => { + ch.subscribe(sub) + + tracer.use('http', false) + + const req = http.request(`${protocol}://localhost:${port}`, res => { + res.on('error', done) + res.on('data', () => {}) + res.on('end', () => done()) + }) + req.on('error', done) + + tracer.use('http', true) + + req.end() + }) + }) + }) + }) + describe('with service configuration', () => { let config From 72d6d7b82a30b9e58b26ca2d93eaeedcad134bb6 Mon Sep 17 00:00:00 2001 From: Igor Unanua Date: Tue, 31 Oct 2023 17:57:11 +0100 Subject: [PATCH 045/147] Handle headers with array values [APPSEC-11971] (#3751) * Handle headers with array values * Add a test with array value in the evidence * Stringify array values * Test header empty array --- .../analyzers/hsts-header-missing-analyzer.js | 7 ++-- .../iast/analyzers/missing-header-analyzer.js | 26 ++++++++++++--- .../xcontenttype-header-missing-analyzer.js | 4 +-- .../hsts-header-missing-analyzer.spec.js | 32 +++++++++++++++++++ ...ontenttype-header-missing-analyzer.spec.js | 15 +++++++++ 5 files changed, 76 insertions(+), 8 
deletions(-) diff --git a/packages/dd-trace/src/appsec/iast/analyzers/hsts-header-missing-analyzer.js b/packages/dd-trace/src/appsec/iast/analyzers/hsts-header-missing-analyzer.js index 87e79e98c8c..a5196d3c92f 100644 --- a/packages/dd-trace/src/appsec/iast/analyzers/hsts-header-missing-analyzer.js +++ b/packages/dd-trace/src/appsec/iast/analyzers/hsts-header-missing-analyzer.js @@ -10,8 +10,11 @@ class HstsHeaderMissingAnalyzer extends MissingHeaderAnalyzer { super(HSTS_HEADER_MISSING, HSTS_HEADER_NAME) } _isVulnerableFromRequestAndResponse (req, res) { - const headerToCheck = res.getHeader(HSTS_HEADER_NAME) - return !this._isHeaderValid(headerToCheck) && this._isHttpsProtocol(req) + const headerValues = this._getHeaderValues(res, HSTS_HEADER_NAME) + return this._isHttpsProtocol(req) && ( + headerValues.length === 0 || + headerValues.some(headerValue => !this._isHeaderValid(headerValue)) + ) } _isHeaderValid (headerValue) { diff --git a/packages/dd-trace/src/appsec/iast/analyzers/missing-header-analyzer.js b/packages/dd-trace/src/appsec/iast/analyzers/missing-header-analyzer.js index 16578b5e427..035a6de1dcc 100644 --- a/packages/dd-trace/src/appsec/iast/analyzers/missing-header-analyzer.js +++ b/packages/dd-trace/src/appsec/iast/analyzers/missing-header-analyzer.js @@ -28,6 +28,15 @@ class MissingHeaderAnalyzer extends Analyzer { }, (data) => this.analyze(data)) } + _getHeaderValues (res, headerName) { + const headerValue = res.getHeader(headerName) + if (Array.isArray(headerValue)) { + return headerValue + } else { + return headerValue ? [headerValue.toString()] : [] + } + } + _getLocation () { return undefined } @@ -41,7 +50,14 @@ class MissingHeaderAnalyzer extends Analyzer { } _getEvidence ({ res }) { - return { value: res.getHeader(this.headerName) } + const headerValues = this._getHeaderValues(res, this.headerName) + let value + if (headerValues.length === 1) { + value = headerValues[0] + } else if (headerValues.length > 0) { + value = JSON.stringify(headerValues) + } + return { value } } _isVulnerable ({ req, res }, context) { @@ -56,9 +72,11 @@ class MissingHeaderAnalyzer extends Analyzer { } _isResponseHtml (res) { - const contentType = res.getHeader('content-type') - return contentType && HTML_CONTENT_TYPES.some(htmlContentType => { - return htmlContentType === contentType || contentType.startsWith(htmlContentType + ';') + const contentTypes = this._getHeaderValues(res, 'content-type') + return contentTypes.some(contentType => { + return contentType && HTML_CONTENT_TYPES.some(htmlContentType => { + return htmlContentType === contentType || contentType.startsWith(htmlContentType + ';') + }) }) } } diff --git a/packages/dd-trace/src/appsec/iast/analyzers/xcontenttype-header-missing-analyzer.js b/packages/dd-trace/src/appsec/iast/analyzers/xcontenttype-header-missing-analyzer.js index 0d10a8952df..6d114d0b168 100644 --- a/packages/dd-trace/src/appsec/iast/analyzers/xcontenttype-header-missing-analyzer.js +++ b/packages/dd-trace/src/appsec/iast/analyzers/xcontenttype-header-missing-analyzer.js @@ -11,8 +11,8 @@ class XcontenttypeHeaderMissingAnalyzer extends MissingHeaderAnalyzer { } _isVulnerableFromRequestAndResponse (req, res) { - const headerToCheck = res.getHeader(XCONTENTTYPEOPTIONS_HEADER_NAME) - return !headerToCheck || headerToCheck.trim().toLowerCase() !== 'nosniff' + const headerValues = this._getHeaderValues(res, XCONTENTTYPEOPTIONS_HEADER_NAME) + return headerValues.length === 0 || headerValues.some(headerValue => headerValue.trim().toLowerCase() !== 'nosniff') } } 
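The spec diffs below exercise the `_getHeaderValues` helper introduced above. As a rough standalone sketch (not part of this patch), the normalization it performs — collapsing a missing header, a plain string, or an array of values into one array shape — looks like this; the `res` stub is a hypothetical stand-in for Node's `http.ServerResponse`:

```javascript
// Sketch of the header normalization used by the missing-header analyzers:
// a header value may be absent, a string, or an array of strings, and is
// always reduced to an array so later checks can treat all shapes alike.
function getHeaderValues (res, headerName) {
  const headerValue = res.getHeader(headerName)
  if (Array.isArray(headerValue)) {
    return headerValue
  }
  return headerValue ? [headerValue.toString()] : []
}

// Hypothetical response stub standing in for http.ServerResponse.
const res = {
  _headers: {
    'x-content-type-options': ['whatever', 'nosniff'],
    'strict-transport-security': 'max-age=100'
  },
  getHeader (name) {
    return this._headers[name.toLowerCase()]
  }
}

console.log(getHeaderValues(res, 'X-Content-Type-Options')) // [ 'whatever', 'nosniff' ]
console.log(getHeaderValues(res, 'Strict-Transport-Security')) // [ 'max-age=100' ]
console.log(getHeaderValues(res, 'Content-Type')) // []
```

With that shape, `_getEvidence` can report a single header value directly and JSON-stringify the value list when more than one is present, which is what the specs below assert.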
diff --git a/packages/dd-trace/test/appsec/iast/analyzers/hsts-header-missing-analyzer.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/hsts-header-missing-analyzer.spec.js index f39a537c40e..9d54fb6cc58 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/hsts-header-missing-analyzer.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/hsts-header-missing-analyzer.spec.js @@ -72,11 +72,43 @@ describe('hsts header missing analyzer', () => { expect(vulnerabilities[0].hash).to.be.equals(analyzer._createHash('HSTS_HEADER_MISSING:mocha')) }, makeRequestWithXFordwardedProtoHeader) + testThatRequestHasVulnerability((req, res) => { + res.setHeader('content-type', ['text/html']) + res.setHeader('Strict-Transport-Security', 'invalid') + res.end('

<html><body><h1>Test</h1></body></html>
') + }, HSTS_HEADER_MISSING, 1, function (vulnerabilities) { + expect(vulnerabilities[0].evidence.value).to.be.equal('invalid') + expect(vulnerabilities[0].hash).to.be.equals(analyzer._createHash('HSTS_HEADER_MISSING:mocha')) + }, makeRequestWithXFordwardedProtoHeader) + + testThatRequestHasVulnerability((req, res) => { + res.setHeader('content-type', ['text/html']) + res.setHeader('Strict-Transport-Security', []) + res.end('

<html><body><h1>Test</h1></body></html>
') + }, HSTS_HEADER_MISSING, 1, function (vulnerabilities) { + expect(vulnerabilities[0].evidence).to.be.undefined + expect(vulnerabilities[0].hash).to.be.equals(analyzer._createHash('HSTS_HEADER_MISSING:mocha')) + }, makeRequestWithXFordwardedProtoHeader) + + testThatRequestHasVulnerability((req, res) => { + res.setHeader('content-type', ['text/html']) + res.setHeader('Strict-Transport-Security', ['invalid1', 'invalid2']) + res.end('

<html><body><h1>Test</h1></body></html>
') + }, HSTS_HEADER_MISSING, 1, function (vulnerabilities) { + expect(vulnerabilities[0].evidence.value).to.be.equal(JSON.stringify(['invalid1', 'invalid2'])) + expect(vulnerabilities[0].hash).to.be.equals(analyzer._createHash('HSTS_HEADER_MISSING:mocha')) + }, makeRequestWithXFordwardedProtoHeader) + testThatRequestHasNoVulnerability((req, res) => { res.setHeader('content-type', 'application/json') res.end('{"key": "test}') }, HSTS_HEADER_MISSING, makeRequestWithXFordwardedProtoHeader) + testThatRequestHasNoVulnerability((req, res) => { + res.setHeader('content-type', ['application/json']) + res.end('{"key": "test}') + }, HSTS_HEADER_MISSING, makeRequestWithXFordwardedProtoHeader) + testThatRequestHasNoVulnerability((req, res) => { res.setHeader('content-type', 'text/html') res.setHeader('Strict-Transport-Security', 'max-age=100') diff --git a/packages/dd-trace/test/appsec/iast/analyzers/xcontenttype-header-missing-analyzer.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/xcontenttype-header-missing-analyzer.spec.js index b3564e02764..94b19d6efd1 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/xcontenttype-header-missing-analyzer.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/xcontenttype-header-missing-analyzer.spec.js @@ -45,6 +45,15 @@ describe('xcontenttype header missing analyzer', () => { expect(vulnerabilities[0].hash).to.be.equals(analyzer._createHash('XCONTENTTYPE_HEADER_MISSING:mocha')) }) + testThatRequestHasVulnerability((req, res) => { + res.setHeader('content-type', ['text/html']) + res.setHeader('X-Content-Type-Options', 'whatever') + res.end('

<html><body><h1>Test</h1></body></html>
') + }, XCONTENTTYPE_HEADER_MISSING, 1, function (vulnerabilities) { + expect(vulnerabilities[0].evidence.value).to.be.equal('whatever') + expect(vulnerabilities[0].hash).to.be.equals(analyzer._createHash('XCONTENTTYPE_HEADER_MISSING:mocha')) + }) + testThatRequestHasNoVulnerability((req, res) => { res.setHeader('content-type', 'application/json') res.end('{"key": "test}') @@ -55,5 +64,11 @@ describe('xcontenttype header missing analyzer', () => { res.setHeader('X-Content-Type-Options', 'nosniff') res.end('{"key": "test}') }, XCONTENTTYPE_HEADER_MISSING) + + testThatRequestHasNoVulnerability((req, res) => { + res.setHeader('content-type', ['text/html']) + res.setHeader('X-Content-Type-Options', 'nosniff') + res.end('{"key": "test}') + }, XCONTENTTYPE_HEADER_MISSING) }) }) From 5715611dfbc264d6c5714afaf4f0b804a8840164 Mon Sep 17 00:00:00 2001 From: Crystal Magloire Date: Wed, 1 Nov 2023 11:25:44 -0400 Subject: [PATCH 046/147] NextJS error handling (#3715) Adding support for Error handling with Next when using nextRequest. The error stack is now being added to the span and root span. --------- Co-authored-by: Sam Brenner Co-authored-by: Sam Brenner <106700075+sabrenner@users.noreply.github.com> --- packages/datadog-instrumentations/src/next.js | 24 +++++++++++++++++ packages/datadog-plugin-next/src/index.js | 5 ++-- .../test/app/api/appDir/error/route.js | 13 ++++++++++ .../datadog-plugin-next/test/index.spec.js | 26 ++++++++++++++++++- 4 files changed, 65 insertions(+), 3 deletions(-) create mode 100644 packages/datadog-plugin-next/test/app/api/appDir/error/route.js diff --git a/packages/datadog-instrumentations/src/next.js b/packages/datadog-instrumentations/src/next.js index a418477220a..21a9f1187e3 100644 --- a/packages/datadog-instrumentations/src/next.js +++ b/packages/datadog-instrumentations/src/next.js @@ -12,6 +12,7 @@ const bodyParsedChannel = channel('apm:next:body-parsed') const queryParsedChannel = channel('apm:next:query-parsed') const requests = new WeakSet() +const nodeNextRequestsToNextRequests = new WeakMap() const MIDDLEWARE_HEADER = 'x-middleware-invoke' @@ -156,6 +157,11 @@ function finish (ctx, result, err) { errorChannel.publish(ctx) } + const maybeNextRequest = nodeNextRequestsToNextRequests.get(ctx.req) + if (maybeNextRequest) { + ctx.nextRequest = maybeNextRequest + } + finishChannel.publish(ctx) if (err) { @@ -165,6 +171,24 @@ function finish (ctx, result, err) { return result } +// also wrapped in dist/server/future/route-handlers/app-route-route-handler.js +// in versions below 13.3.0 that support middleware, +// however, it is not provided as a class function or exported property +addHook({ + name: 'next', + versions: ['>=13.3.0'], + file: 'dist/server/web/spec-extension/adapters/next-request.js' +}, NextRequestAdapter => { + shimmer.wrap(NextRequestAdapter.NextRequestAdapter, 'fromNodeNextRequest', fromNodeNextRequest => { + return function (nodeNextRequest) { + const nextRequest = fromNodeNextRequest.apply(this, arguments) + nodeNextRequestsToNextRequests.set(nodeNextRequest.originalRequest, nextRequest) + return nextRequest + } + }) + return NextRequestAdapter +}) + addHook({ name: 'next', versions: ['>=11.1'], diff --git a/packages/datadog-plugin-next/src/index.js b/packages/datadog-plugin-next/src/index.js index b05bda981e0..d0691bb2977 100644 --- a/packages/datadog-plugin-next/src/index.js +++ b/packages/datadog-plugin-next/src/index.js @@ -43,7 +43,7 @@ class NextPlugin extends ServerPlugin { this.addError(error, span) } - finish ({ req, res }) { + 
finish ({ req, res, nextRequest = {} }) { const store = storage.getStore() if (!store) return @@ -52,7 +52,8 @@ class NextPlugin extends ServerPlugin { const error = span.context()._tags['error'] if (!this.config.validateStatus(res.statusCode) && !error) { - span.setTag('error', true) + span.setTag('error', req.error || nextRequest.error || true) + web.addError(req, req.error || nextRequest.error || true) } span.addTags({ diff --git a/packages/datadog-plugin-next/test/app/api/appDir/error/route.js b/packages/datadog-plugin-next/test/app/api/appDir/error/route.js new file mode 100644 index 00000000000..e7ef96b0a31 --- /dev/null +++ b/packages/datadog-plugin-next/test/app/api/appDir/error/route.js @@ -0,0 +1,13 @@ +import { NextResponse } from 'next/server' + +export async function GET (req) { + let status = 200 + try { + throw new Error('error in app dir api route') + } catch (error) { + req.error = error + status = 500 + } + + return NextResponse.json({}, { status }) +} diff --git a/packages/datadog-plugin-next/test/index.spec.js b/packages/datadog-plugin-next/test/index.spec.js index 9d3718b63f7..afee2b19817 100644 --- a/packages/datadog-plugin-next/test/index.spec.js +++ b/packages/datadog-plugin-next/test/index.spec.js @@ -59,7 +59,7 @@ describe('Plugin', function () { // additionally, next.js sets timeouts in 10.x when displaying extra logs // https://github.com/vercel/next.js/blob/v10.2.0/packages/next/server/next.ts#L132-L133 - setTimeout(done, 100) // relatively high timeout chosen to be safe + setTimeout(done, 700) // relatively high timeout chosen to be safe }) server.stderr.on('data', chunk => process.stderr.write(chunk)) server.stdout.on('data', chunk => process.stdout.write(chunk)) @@ -467,6 +467,30 @@ describe('Plugin', function () { .get(`http://127.0.0.1:${port}/api/hello/world`) .catch(done) }) + + if (satisfies(pkg.version, '>=13.3.0')) { + it('should attach the error to the span from a NextRequest', done => { + agent + .use(traces => { + const spans = traces[0] + + expect(spans[1]).to.have.property('name', 'next.request') + expect(spans[1]).to.have.property('error', 1) + + expect(spans[1].meta).to.have.property('error.message', 'error in app dir api route') + expect(spans[1].meta).to.have.property('error.type', 'Error') + expect(spans[1].meta['error.stack']).to.exist + }) + .then(done) + .catch(done) + + axios + .get(`http://127.0.0.1:${port}/api/appDir/error`) + .catch(err => { + if (err.response.status !== 500) done(err) + }) + }) + } }) // Issue with 13.4.13 - 13.4.18 causes process.env not to work properly in standalone mode From 8c328ce45b1bb1bd2a054ac6cc1ae43366e2431c Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Wed, 1 Nov 2023 22:26:52 +0100 Subject: [PATCH 047/147] Unsubscribe instead of subscribe (#3756) --- packages/dd-trace/src/profiling/profilers/wall.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/dd-trace/src/profiling/profilers/wall.js b/packages/dd-trace/src/profiling/profilers/wall.js index 57813aed103..20b832d0879 100644 --- a/packages/dd-trace/src/profiling/profilers/wall.js +++ b/packages/dd-trace/src/profiling/profilers/wall.js @@ -223,7 +223,7 @@ class NativeWallProfiler { const profile = this._stop(false) if (this._codeHotspotsEnabled) { beforeCh.unsubscribe(this._enter) - enterCh.subscribe(this._enter) + enterCh.unsubscribe(this._enter) this._profilerState = undefined } From e1dc3b2cf8dcfc08762bbd57b7e57fe6f41f2196 Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Thu, 2 Nov 2023 10:16:26 +0100 Subject: [PATCH 
048/147] PROF-8461: make endpoint profiling independent of code hotspots (#3727) --- packages/dd-trace/src/profiling/config.js | 20 +---- .../dd-trace/src/profiling/profilers/wall.js | 73 +++++++++---------- .../dd-trace/test/profiling/config.spec.js | 61 ---------------- 3 files changed, 39 insertions(+), 115 deletions(-) diff --git a/packages/dd-trace/src/profiling/config.js b/packages/dd-trace/src/profiling/config.js index 2c4856f5c59..7860129c591 100644 --- a/packages/dd-trace/src/profiling/config.js +++ b/packages/dd-trace/src/profiling/config.js @@ -128,24 +128,10 @@ class Config { ? options.profilers : getProfilers({ DD_PROFILING_HEAP_ENABLED, DD_PROFILING_WALLTIME_ENABLED, DD_PROFILING_PROFILERS }) - function getCodeHotspotsOptionsOr (defvalue) { - return coalesce(options.codeHotspotsEnabled, - DD_PROFILING_CODEHOTSPOTS_ENABLED, - DD_PROFILING_EXPERIMENTAL_CODEHOTSPOTS_ENABLED, defvalue) - } - this.codeHotspotsEnabled = isTrue(getCodeHotspotsOptionsOr(false)) + this.codeHotspotsEnabled = isTrue(coalesce(options.codeHotspotsEnabled, + DD_PROFILING_CODEHOTSPOTS_ENABLED, + DD_PROFILING_EXPERIMENTAL_CODEHOTSPOTS_ENABLED, false)) logExperimentalVarDeprecation('CODEHOTSPOTS_ENABLED') - if (this.endpointCollectionEnabled && !this.codeHotspotsEnabled) { - if (getCodeHotspotsOptionsOr(undefined) !== undefined) { - this.logger.warn( - 'Endpoint collection is enabled, but Code Hotspots are disabled. ' + - 'Enable Code Hotspots too for endpoint collection to work.') - this.endpointCollectionEnabled = false - } else { - this.logger.info('Code Hotspots are implicitly enabled by endpoint collection.') - this.codeHotspotsEnabled = true - } - } this.profilers = ensureProfilers(profilers, this) } diff --git a/packages/dd-trace/src/profiling/profilers/wall.js b/packages/dd-trace/src/profiling/profilers/wall.js index 20b832d0879..ff4cf7d1570 100644 --- a/packages/dd-trace/src/profiling/profilers/wall.js +++ b/packages/dd-trace/src/profiling/profilers/wall.js @@ -64,29 +64,6 @@ function endpointNameFromTags (tags) { ].filter(v => v).join(' ') } -function updateContext (context, span, startedSpans, endpointCollectionEnabled) { - context.spanId = span.context().toSpanId() - const rootSpan = startedSpans[0] - if (rootSpan) { - context.rootSpanId = rootSpan.context().toSpanId() - if (endpointCollectionEnabled) { - // Find the first webspan starting from the end: - // There might be several webspans, for example with next.js, http plugin creates a first span - // and then next.js plugin creates a child span, and this child span haves the correct endpoint information. 
- for (let i = startedSpans.length - 1; i >= 0; i--) { - const tags = getSpanContextTags(startedSpans[i]) - if (isWebServerSpan(tags)) { - context.webTags = tags - // endpoint may not be determined yet, but keep it as fallback - // if tags are not available anymore during serialization - context.endpoint = endpointNameFromTags(tags) - break - } - } - } - } -} - class NativeWallProfiler { constructor (options = {}) { this.type = 'wall' @@ -94,6 +71,7 @@ class NativeWallProfiler { this._flushIntervalMillis = options.flushInterval || 60 * 1e3 // 60 seconds this._codeHotspotsEnabled = !!options.codeHotspotsEnabled this._endpointCollectionEnabled = !!options.endpointCollectionEnabled + this._withContexts = this._codeHotspotsEnabled || this._endpointCollectionEnabled this._v8ProfilerBugWorkaroundEnabled = !!options.v8ProfilerBugWorkaroundEnabled this._mapper = undefined this._pprof = undefined @@ -115,12 +93,6 @@ class NativeWallProfiler { start ({ mapper } = {}) { if (this._started) return - if (this._codeHotspotsEnabled && !this._emittedFFMessage && this._logger) { - this._logger.debug( - `Wall profiler: Enable trace_show_breakdown_profiling_for_node feature flag to see code hotspots.`) - this._emittedFFMessage = true - } - this._mapper = mapper this._pprof = require('@datadog/pprof') kSampleCount = this._pprof.time.constants.kSampleCount @@ -137,12 +109,12 @@ class NativeWallProfiler { intervalMicros: this._samplingIntervalMicros, durationMillis: this._flushIntervalMillis, sourceMapper: this._mapper, - withContexts: this._codeHotspotsEnabled, + withContexts: this._withContexts, lineNumbers: false, workaroundV8Bug: this._v8ProfilerBugWorkaroundEnabled }) - if (this._codeHotspotsEnabled) { + if (this._withContexts) { this._profilerState = this._pprof.time.getState() this._currentContext = {} this._pprof.time.setContext(this._currentContext) @@ -167,9 +139,7 @@ class NativeWallProfiler { this._currentContext = {} this._pprof.time.setContext(this._currentContext) - if (this._lastSpan) { - updateContext(context, this._lastSpan, this._lastStartedSpans, this._endpointCollectionEnabled) - } + this._updateContext(context) } const span = getActiveSpan() @@ -182,6 +152,35 @@ class NativeWallProfiler { } } + _updateContext (context) { + if (!this._lastSpan) { + return + } + if (this._codeHotspotsEnabled) { + context.spanId = this._lastSpan.context().toSpanId() + const rootSpan = this._lastStartedSpans[0] + if (rootSpan) { + context.rootSpanId = rootSpan.context().toSpanId() + } + } + if (this._endpointCollectionEnabled) { + const startedSpans = this._lastStartedSpans + // Find the first webspan starting from the end: + // There might be several webspans, for example with next.js, http plugin creates a first span + // and then next.js plugin creates a child span, and this child span haves the correct endpoint information. + for (let i = startedSpans.length - 1; i >= 0; i--) { + const tags = getSpanContextTags(startedSpans[i]) + if (isWebServerSpan(tags)) { + context.webTags = tags + // endpoint may not be determined yet, but keep it as fallback + // if tags are not available anymore during serialization + context.endpoint = endpointNameFromTags(tags) + break + } + } + } + } + _reportV8bug (maybeBug) { const tag = `v8_profiler_bug_workaround_enabled:${this._v8ProfilerBugWorkaroundEnabled}` const metric = `v8_cpu_profiler${maybeBug ? 
'_maybe' : ''}_stuck_event_loop` @@ -194,12 +193,12 @@ class NativeWallProfiler { _stop (restart) { if (!this._started) return - if (this._codeHotspotsEnabled) { + if (this._withContexts) { // update last sample context if needed this._enter() this._lastSampleCount = 0 } - const profile = this._pprof.time.stop(restart, this._codeHotspotsEnabled ? generateLabels : undefined) + const profile = this._pprof.time.stop(restart, this._withContexts ? generateLabels : undefined) if (restart) { const v8BugDetected = this._pprof.time.v8ProfilerStuckEventLoopDetected() if (v8BugDetected !== 0) { @@ -221,7 +220,7 @@ class NativeWallProfiler { if (!this._started) return const profile = this._stop(false) - if (this._codeHotspotsEnabled) { + if (this._withContexts) { beforeCh.unsubscribe(this._enter) enterCh.unsubscribe(this._enter) this._profilerState = undefined diff --git a/packages/dd-trace/test/profiling/config.spec.js b/packages/dd-trace/test/profiling/config.spec.js index 9becc3a0429..483b078a7f8 100644 --- a/packages/dd-trace/test/profiling/config.spec.js +++ b/packages/dd-trace/test/profiling/config.spec.js @@ -237,67 +237,6 @@ describe('config', () => { expect(config.profilers[0].endpointCollectionEnabled()).false }) - it('should implicitly turn on code hotspots for endpoint profiling when they are not explicitly disabled', () => { - process.env = { - DD_PROFILING_PROFILERS: 'wall', - DD_PROFILING_ENDPOINT_COLLECTION_ENABLED: '1' - } - const infos = [] - const options = { - logger: { - debug () {}, - info (info) { - infos.push(info) - }, - warn () {}, - error () {} - } - } - - const config = new Config(options) - - expect(infos.length).to.equal(1) - expect(infos[0]).to.equal('Code Hotspots are implicitly enabled by endpoint collection.') - - expect(config.profilers).to.be.an('array') - expect(config.profilers.length).to.equal(1) - expect(config.profilers[0]).to.be.an.instanceOf(WallProfiler) - expect(config.profilers[0].codeHotspotsEnabled()).true - expect(config.profilers[0].endpointCollectionEnabled()).true - }) - - it('should warn about code hotspots being explicitly disabled with endpoint profiling', () => { - process.env = { - DD_PROFILING_PROFILERS: 'wall', - DD_PROFILING_CODEHOTSPOTS_ENABLED: '0', - DD_PROFILING_ENDPOINT_COLLECTION_ENABLED: '1' - } - const warnings = [] - const options = { - logger: { - debug () {}, - info () {}, - warn (warning) { - warnings.push(warning) - }, - error () {} - } - } - - const config = new Config(options) - - expect(warnings.length).to.equal(1) - expect(warnings[0]).to.equal( - 'Endpoint collection is enabled, but Code Hotspots are disabled. 
' + - 'Enable Code Hotspots too for endpoint collection to work.') - - expect(config.profilers).to.be.an('array') - expect(config.profilers.length).to.equal(1) - expect(config.profilers[0]).to.be.an.instanceOf(WallProfiler) - expect(config.profilers[0].codeHotspotsEnabled()).false - expect(config.profilers[0].endpointCollectionEnabled()).false - }) - it('should support tags', () => { const tags = { env: 'dev' From ebfcc3fb23f4799350f79e528ee0d2d5e01baeaf Mon Sep 17 00:00:00 2001 From: Igor Unanua Date: Thu, 2 Nov 2023 13:08:14 +0100 Subject: [PATCH 049/147] Upgrade rewriter version to 2.2.1 (#3760) --- package.json | 2 +- yarn.lock | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/package.json b/package.json index 83c6ced1542..8b9ca4de38f 100644 --- a/package.json +++ b/package.json @@ -69,7 +69,7 @@ }, "dependencies": { "@datadog/native-appsec": "^4.0.0", - "@datadog/native-iast-rewriter": "2.2.0", + "@datadog/native-iast-rewriter": "2.2.1", "@datadog/native-iast-taint-tracking": "1.6.3", "@datadog/native-metrics": "^2.0.0", "@datadog/pprof": "4.0.1", diff --git a/yarn.lock b/yarn.lock index 2d3b7eea1d7..ab11d83c275 100644 --- a/yarn.lock +++ b/yarn.lock @@ -392,10 +392,10 @@ dependencies: node-gyp-build "^3.9.0" -"@datadog/native-iast-rewriter@2.2.0": - version "2.2.0" - resolved "https://registry.yarnpkg.com/@datadog/native-iast-rewriter/-/native-iast-rewriter-2.2.0.tgz#cfcdfaf128450f7d92a840eee8cd030b9746f49c" - integrity sha512-YrCgLGvOQh3EkWYjqZKpelg60idtMcC/jWskZSdr4KxvF61BM9zp5NF6HeUKON6RHCmqDqFS3wyj1NNRMID1VQ== +"@datadog/native-iast-rewriter@2.2.1": + version "2.2.1" + resolved "https://registry.yarnpkg.com/@datadog/native-iast-rewriter/-/native-iast-rewriter-2.2.1.tgz#3c74c5a8caa0b876e091e9c5a95256add0d73e1c" + integrity sha512-DyZlE8gNa5AoOFNKGRJU4RYF/Y/tJzv4bIAMuVBbEnMA0xhiIYqpYQG8T3OKkALl3VSEeBMjYwuOR2fCrJ6gzA== dependencies: lru-cache "^7.14.0" node-gyp-build "^4.5.0" From f2b412ed9442487c72df5e20ff51fd9296d44035 Mon Sep 17 00:00:00 2001 From: Ugaitz Urien Date: Thu, 2 Nov 2023 13:33:37 +0100 Subject: [PATCH 050/147] Fix ci for mongoose in node 14 (#3758) --- .../test/mongoose.spec.js | 26 ++++++++++++------- .../test/index.spec.js | 4 +++ .../test/integration-test/client.spec.js | 4 +++ ...n-mongodb-analyzer.mongoose.plugin.spec.js | 6 ++++- 4 files changed, 29 insertions(+), 11 deletions(-) diff --git a/packages/datadog-instrumentations/test/mongoose.spec.js b/packages/datadog-instrumentations/test/mongoose.spec.js index 28fdf4764f1..adaae709c82 100644 --- a/packages/datadog-instrumentations/test/mongoose.spec.js +++ b/packages/datadog-instrumentations/test/mongoose.spec.js @@ -3,6 +3,7 @@ const agent = require('../../dd-trace/test/plugins/agent') const { channel } = require('../src/helpers/instrument') const semver = require('semver') +const { NODE_MAJOR } = require('../../../version') const startCh = channel('datadog:mongoose:model:filter:start') const finishCh = channel('datadog:mongoose:model:filter:finish') @@ -14,6 +15,9 @@ describe('mongoose instrumentations', () => { iterationRanges.forEach(range => { describe(range, () => { withVersions('mongoose', ['mongoose'], range, (version) => { + const specificVersion = require(`../../../versions/mongoose@${version}`).version() + if (NODE_MAJOR === 14 && semver.satisfies(specificVersion, '>=8')) return + let Test, dbName, id, mongoose function connect () { @@ -129,15 +133,17 @@ describe('mongoose instrumentations', () => { }) } - it('continue working as expected with promise', (done) => { - 
Test.count({ type: 'test' }).then((res) => { - expect(res).to.be.equal(3) + if (!semver.satisfies(specificVersion, '>=8')) { + // Model.count method removed from mongoose 8.0.0 + it('continue working as expected with promise', (done) => { + Test.count({ type: 'test' }).then((res) => { + expect(res).to.be.equal(3) - done() + done() + }) }) - }) - - testCallbacksCalled('count', [{ type: 'test' }]) + testCallbacksCalled('count', [{ type: 'test' }]) + } }) if (semver.intersects(version, '>=6')) { @@ -164,8 +170,8 @@ describe('mongoose instrumentations', () => { testCallbacksCalled('countDocuments', [{ type: 'test' }]) }) } - - if (semver.intersects(version, '>=5')) { + if (semver.intersects(version, '>=5') && semver.satisfies(specificVersion, '<8')) { + // Model.count method removed from mongoose 8.0.0 describe('deleteOne', () => { if (range !== '>=7') { it('continue working as expected with cb', (done) => { @@ -243,7 +249,7 @@ describe('mongoose instrumentations', () => { testCallbacksCalled('findOne', [{ type: 'test' }]) }) - if (semver.intersects(version, '>=6')) { + if (semver.intersects(version, '>=6') && semver.satisfies(specificVersion, '<8')) { describe('findOneAndDelete', () => { if (range !== '>=7') { it('continue working as expected with cb', (done) => { diff --git a/packages/datadog-plugin-mongoose/test/index.spec.js b/packages/datadog-plugin-mongoose/test/index.spec.js index ed0da6a0206..305ddeca31c 100644 --- a/packages/datadog-plugin-mongoose/test/index.spec.js +++ b/packages/datadog-plugin-mongoose/test/index.spec.js @@ -2,6 +2,7 @@ const semver = require('semver') const agent = require('../../dd-trace/test/plugins/agent') +const { NODE_MAJOR } = require('../../../version') describe('Plugin', () => { let id @@ -10,6 +11,9 @@ describe('Plugin', () => { describe('mongoose', () => { withVersions('mongoose', ['mongoose'], (version) => { + const specificVersion = require(`../../../versions/mongoose@${version}`).version() + if (NODE_MAJOR === 14 && semver.satisfies(specificVersion, '>=8')) return + let mongoose // This needs to be called synchronously right before each test to make diff --git a/packages/datadog-plugin-mongoose/test/integration-test/client.spec.js b/packages/datadog-plugin-mongoose/test/integration-test/client.spec.js index e91d4885edf..d393fdd774b 100644 --- a/packages/datadog-plugin-mongoose/test/integration-test/client.spec.js +++ b/packages/datadog-plugin-mongoose/test/integration-test/client.spec.js @@ -8,6 +8,7 @@ const { } = require('../../../../integration-tests/helpers') const { assert } = require('chai') const { NODE_MAJOR } = require('../../../../version') +const semver = require('semver') // newer packages are not supported on older node versions const range = NODE_MAJOR < 16 ? 
'<5' : '>=4' @@ -18,6 +19,9 @@ describe('esm', () => { let sandbox withVersions('mongoose', ['mongoose'], range, version => { + const specificVersion = require(`../../../../versions/mongoose@${version}`).version() + if (NODE_MAJOR === 14 && semver.satisfies(specificVersion, '>=8')) return + before(async function () { this.timeout(20000) sandbox = await createSandbox([`'mongoose@${version}'`], false, [ diff --git a/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.mongoose.plugin.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.mongoose.plugin.spec.js index 624bb26617f..787f737c156 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.mongoose.plugin.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/nosql-injection-mongodb-analyzer.mongoose.plugin.spec.js @@ -7,10 +7,14 @@ const semver = require('semver') const os = require('os') const path = require('path') const fs = require('fs') +const { NODE_MAJOR } = require('../../../../../../version') describe('nosql injection detection in mongodb - whole feature', () => { withVersions('express', 'express', '>4.18.0', expressVersion => { withVersions('mongoose', 'mongoose', '>4.0.0', mongooseVersion => { + const specificMongooseVersion = require(`../../../../../../versions/mongoose@${mongooseVersion}`).version() + if (NODE_MAJOR === 14 && semver.satisfies(specificMongooseVersion, '>=8')) return + const vulnerableMethodFilename = 'mongoose-vulnerable-method.js' let mongoose, Test, tmpFilePath @@ -106,7 +110,7 @@ describe('nosql injection detection in mongodb - whole feature', () => { } }) - if (semver.satisfies(mongooseVersion, '>=6')) { + if (semver.satisfies(specificMongooseVersion, '>=6')) { testThatRequestHasNoVulnerability({ testDescription: 'should not have NOSQL_MONGODB_INJECTION vulnerability with mongoose.sanitizeFilter', fn: async (req, res) => { From ef883d1065dac54ab460537747a71fe69899ec4d Mon Sep 17 00:00:00 2001 From: William Conti <58711692+wconti27@users.noreply.github.com> Date: Fri, 3 Nov 2023 12:13:04 -0400 Subject: [PATCH 051/147] add payload size to kafka stats (#3734) add payloadSize attribute to emitted DSM stats for Kafka --- package.json | 2 +- .../datadog-plugin-kafkajs/src/consumer.js | 6 +- .../datadog-plugin-kafkajs/src/producer.js | 15 ++-- .../datadog-plugin-kafkajs/test/index.spec.js | 28 ++++++- .../dd-trace/src/datastreams/processor.js | 66 ++++++++++++---- packages/dd-trace/src/tracer.js | 6 +- .../test/datastreams/encoding.spec.js | 2 + .../dd-trace/test/datastreams/pathway.spec.js | 2 + .../test/datastreams/processor.spec.js | 76 ++++++++++++++++++- 9 files changed, 174 insertions(+), 29 deletions(-) diff --git a/package.json b/package.json index 8b9ca4de38f..2f11b62aa05 100644 --- a/package.json +++ b/package.json @@ -19,7 +19,7 @@ "test:appsec:ci": "nyc --no-clean --include \"packages/dd-trace/src/appsec/**/*.js\" --exclude \"packages/dd-trace/test/appsec/**/*.plugin.spec.js\" -- npm run test:appsec", "test:appsec:plugins": "mocha --colors --exit -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/dd-trace/test/appsec/**/*.@($(echo $PLUGINS)).plugin.spec.js\"", "test:appsec:plugins:ci": "yarn services && nyc --no-clean --include \"packages/dd-trace/src/appsec/**/*.js\" -- npm run test:appsec:plugins", - "test:trace:core": "tap packages/dd-trace/test/*.spec.js \"packages/dd-trace/test/{ci-visibility,encode,exporters,opentelemetry,opentracing,plugins,service-naming,telemetry}/**/*.spec.js\"", + 
"test:trace:core": "tap packages/dd-trace/test/*.spec.js \"packages/dd-trace/test/{ci-visibility,datastreams,encode,exporters,opentelemetry,opentracing,plugins,service-naming,telemetry}/**/*.spec.js\"", "test:trace:core:ci": "npm run test:trace:core -- --coverage --nyc-arg=--include=\"packages/dd-trace/src/**/*.js\"", "test:instrumentations": "mocha --colors -r 'packages/dd-trace/test/setup/mocha.js' 'packages/datadog-instrumentations/test/**/*.spec.js'", "test:instrumentations:ci": "nyc --no-clean --include 'packages/datadog-instrumentations/src/**/*.js' -- npm run test:instrumentations", diff --git a/packages/datadog-plugin-kafkajs/src/consumer.js b/packages/datadog-plugin-kafkajs/src/consumer.js index d643377b624..83102674e04 100644 --- a/packages/datadog-plugin-kafkajs/src/consumer.js +++ b/packages/datadog-plugin-kafkajs/src/consumer.js @@ -1,5 +1,6 @@ 'use strict' +const { getMessageSize, CONTEXT_PROPAGATION_KEY } = require('../../dd-trace/src/datastreams/processor') const ConsumerPlugin = require('../../dd-trace/src/plugins/consumer') class KafkajsConsumerPlugin extends ConsumerPlugin { @@ -8,9 +9,10 @@ class KafkajsConsumerPlugin extends ConsumerPlugin { start ({ topic, partition, message, groupId }) { if (this.config.dsmEnabled) { - this.tracer.decodeDataStreamsContext(message.headers['dd-pathway-ctx']) + const payloadSize = getMessageSize(message) + this.tracer.decodeDataStreamsContext(message.headers[CONTEXT_PROPAGATION_KEY]) this.tracer - .setCheckpoint(['direction:in', `group:${groupId}`, `topic:${topic}`, 'type:kafka']) + .setCheckpoint(['direction:in', `group:${groupId}`, `topic:${topic}`, 'type:kafka'], payloadSize) } const childOf = extract(this.tracer, message.headers) this.startSpan({ diff --git a/packages/datadog-plugin-kafkajs/src/producer.js b/packages/datadog-plugin-kafkajs/src/producer.js index d2a272a234b..b7ac9344931 100644 --- a/packages/datadog-plugin-kafkajs/src/producer.js +++ b/packages/datadog-plugin-kafkajs/src/producer.js @@ -2,6 +2,8 @@ const ProducerPlugin = require('../../dd-trace/src/plugins/producer') const { encodePathwayContext } = require('../../dd-trace/src/datastreams/pathway') +const { getMessageSize, CONTEXT_PROPAGATION_KEY } = require('../../dd-trace/src/datastreams/processor') + const BOOTSTRAP_SERVERS_KEY = 'messaging.kafka.bootstrap.servers' class KafkajsProducerPlugin extends ProducerPlugin { @@ -11,11 +13,6 @@ class KafkajsProducerPlugin extends ProducerPlugin { start ({ topic, messages, bootstrapServers }) { let pathwayCtx - if (this.config.dsmEnabled) { - const dataStreamsContext = this.tracer - .setCheckpoint(['direction:out', `topic:${topic}`, 'type:kafka']) - pathwayCtx = encodePathwayContext(dataStreamsContext) - } const span = this.startSpan({ resource: topic, meta: { @@ -31,8 +28,14 @@ class KafkajsProducerPlugin extends ProducerPlugin { } for (const message of messages) { if (typeof message === 'object') { - if (this.config.dsmEnabled) message.headers['dd-pathway-ctx'] = pathwayCtx this.tracer.inject(span, 'text_map', message.headers) + if (this.config.dsmEnabled) { + const payloadSize = getMessageSize(message) + const dataStreamsContext = this.tracer + .setCheckpoint(['direction:out', `topic:${topic}`, 'type:kafka'], payloadSize) + pathwayCtx = encodePathwayContext(dataStreamsContext) + message.headers[CONTEXT_PROPAGATION_KEY] = pathwayCtx + } } } } diff --git a/packages/datadog-plugin-kafkajs/test/index.spec.js b/packages/datadog-plugin-kafkajs/test/index.spec.js index bd62904ccd9..db6b21cc5a3 100644 --- 
a/packages/datadog-plugin-kafkajs/test/index.spec.js +++ b/packages/datadog-plugin-kafkajs/test/index.spec.js @@ -8,7 +8,7 @@ const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../../dd-trace/src/c const { expectedSchema, rawExpectedSchema } = require('./naming') const DataStreamsContext = require('../../dd-trace/src/data_streams_context') const { computePathwayHash } = require('../../dd-trace/src/datastreams/pathway') -const { ENTRY_PARENT_HASH } = require('../../dd-trace/src/datastreams/processor') +const { ENTRY_PARENT_HASH, DataStreamsProcessor } = require('../../dd-trace/src/datastreams/processor') describe('Plugin', () => { describe('kafkajs', function () { @@ -299,6 +299,32 @@ describe('Plugin', () => { }) setDataStreamsContextSpy.restore() }) + + it('Should set a message payload size when producing a message', async () => { + const messages = [{ key: 'key1', value: 'test2' }] + if (DataStreamsProcessor.prototype.recordCheckpoint.isSinonProxy) { + DataStreamsProcessor.prototype.recordCheckpoint.restore() + } + const recordCheckpointSpy = sinon.spy(DataStreamsProcessor.prototype, 'recordCheckpoint') + await sendMessages(kafka, testTopic, messages) + expect(recordCheckpointSpy.args[0][0].hasOwnProperty('payloadSize')) + recordCheckpointSpy.restore() + }) + + it('Should set a message payload size when consuming a message', async () => { + const messages = [{ key: 'key1', value: 'test2' }] + if (DataStreamsProcessor.prototype.recordCheckpoint.isSinonProxy) { + DataStreamsProcessor.prototype.recordCheckpoint.restore() + } + const recordCheckpointSpy = sinon.spy(DataStreamsProcessor.prototype, 'recordCheckpoint') + await sendMessages(kafka, testTopic, messages) + await consumer.run({ + eachMessage: async () => { + expect(recordCheckpointSpy.args[0][0].hasOwnProperty('payloadSize')) + recordCheckpointSpy.restore() + } + }) + }) }) }) }) diff --git a/packages/dd-trace/src/datastreams/processor.js b/packages/dd-trace/src/datastreams/processor.js index edc9d865290..93c4e9191bf 100644 --- a/packages/dd-trace/src/datastreams/processor.js +++ b/packages/dd-trace/src/datastreams/processor.js @@ -4,12 +4,15 @@ const pkg = require('../../../../package.json') const Uint64 = require('int64-buffer').Uint64BE const { LogCollapsingLowestDenseDDSketch } = require('@datadog/sketches-js') - +const { encodePathwayContext } = require('./pathway') const { DataStreamsWriter } = require('./writer') const { computePathwayHash } = require('./pathway') +const { types } = require('util') + const ENTRY_PARENT_HASH = Buffer.from('0000000000000000', 'hex') const HIGH_ACCURACY_DISTRIBUTION = 0.0075 +const CONTEXT_PROPAGATION_KEY = 'dd-pathway-ctx' class StatsPoint { constructor (hash, parentHash, edgeTags) { @@ -18,6 +21,7 @@ class StatsPoint { this.edgeTags = edgeTags this.edgeLatency = new LogCollapsingLowestDenseDDSketch(HIGH_ACCURACY_DISTRIBUTION) this.pathwayLatency = new LogCollapsingLowestDenseDDSketch(HIGH_ACCURACY_DISTRIBUTION) + this.payloadSize = new LogCollapsingLowestDenseDDSketch(HIGH_ACCURACY_DISTRIBUTION) } addLatencies (checkpoint) { @@ -25,6 +29,7 @@ class StatsPoint { const pathwayLatencySec = checkpoint.pathwayLatencyNs / 1e9 this.edgeLatency.accept(edgeLatencySec) this.pathwayLatency.accept(pathwayLatencySec) + this.payloadSize.accept(checkpoint.payloadSize) } encode () { @@ -33,7 +38,8 @@ class StatsPoint { ParentHash: this.parentHash, EdgeTags: this.edgeTags, EdgeLatency: this.edgeLatency.toProto(), - PathwayLatency: this.pathwayLatency.toProto() + PathwayLatency: 
this.pathwayLatency.toProto(), + PayloadSize: this.payloadSize.toProto() } } } @@ -49,6 +55,29 @@ class StatsBucket extends Map { } } +function getSizeOrZero (obj) { + if (typeof obj === 'string') { + return Buffer.from(obj, 'utf-8').length + } + if (types.isArrayBuffer(obj)) { + return obj.byteLength + } + if (Buffer.isBuffer(obj)) { + return obj.length + } + return 0 +} + +function getHeadersSize (headers) { + if (headers === undefined) return 0 + return Object.entries(headers).reduce((prev, [key, val]) => getSizeOrZero(key) + getSizeOrZero(val) + prev, 0) +} + +function getMessageSize (message) { + const { key, value, headers } = message + return getSizeOrZero(key) + getSizeOrZero(value) + getHeadersSize(headers) +} + class TimeBuckets extends Map { forTime (time) { if (!this.has(time)) { @@ -113,7 +142,7 @@ class DataStreamsProcessor { .addLatencies(checkpoint) } - setCheckpoint (edgeTags, ctx = null) { + setCheckpoint (edgeTags, ctx = null, payloadSize = 0) { if (!this.enabled) return null const nowNs = Date.now() * 1e6 const direction = edgeTags.find(t => t.startsWith('direction:')) @@ -147,23 +176,30 @@ class DataStreamsProcessor { const hash = computePathwayHash(this.service, this.env, edgeTags, parentHash) const edgeLatencyNs = nowNs - edgeStartNs const pathwayLatencyNs = nowNs - pathwayStartNs + const dataStreamsContext = { + hash: hash, + edgeStartNs: edgeStartNs, + pathwayStartNs: pathwayStartNs, + previousDirection: direction, + closestOppositeDirectionHash: closestOppositeDirectionHash, + closestOppositeDirectionEdgeStart: closestOppositeDirectionEdgeStart + } + if (direction === 'direction:out') { + // Add the header for this now, as the callee doesn't have access to context when producing + payloadSize += getSizeOrZero(encodePathwayContext(dataStreamsContext)) + payloadSize += CONTEXT_PROPAGATION_KEY.length + } const checkpoint = { currentTimestamp: nowNs, parentHash: parentHash, hash: hash, edgeTags: edgeTags, edgeLatencyNs: edgeLatencyNs, - pathwayLatencyNs: pathwayLatencyNs + pathwayLatencyNs: pathwayLatencyNs, + payloadSize: payloadSize } this.recordCheckpoint(checkpoint) - return { - hash: hash, - edgeStartNs: edgeStartNs, - pathwayStartNs: pathwayStartNs, - previousDirection: direction, - closestOppositeDirectionHash: closestOppositeDirectionHash, - closestOppositeDirectionEdgeStart: closestOppositeDirectionEdgeStart - } + return dataStreamsContext } _serializeBuckets () { @@ -194,5 +230,9 @@ module.exports = { StatsPoint: StatsPoint, StatsBucket: StatsBucket, TimeBuckets, - ENTRY_PARENT_HASH + getMessageSize, + getHeadersSize, + getSizeOrZero, + ENTRY_PARENT_HASH, + CONTEXT_PROPAGATION_KEY } diff --git a/packages/dd-trace/src/tracer.js b/packages/dd-trace/src/tracer.js index a90f1095424..ebdf3a68b8d 100644 --- a/packages/dd-trace/src/tracer.js +++ b/packages/dd-trace/src/tracer.js @@ -31,8 +31,10 @@ class DatadogTracer extends Tracer { // todo[piochelepiotr] These two methods are not related to the tracer, but to data streams monitoring. // They should be moved outside of the tracer in the future. 
- setCheckpoint (edgeTags) { - const ctx = this._dataStreamsProcessor.setCheckpoint(edgeTags, DataStreamsContext.getDataStreamsContext()) + setCheckpoint (edgeTags, payloadSize = 0) { + const ctx = this._dataStreamsProcessor.setCheckpoint( + edgeTags, DataStreamsContext.getDataStreamsContext(), payloadSize + ) DataStreamsContext.setDataStreamsContext(ctx) return ctx } diff --git a/packages/dd-trace/test/datastreams/encoding.spec.js b/packages/dd-trace/test/datastreams/encoding.spec.js index be5dd1d838c..ea7a78b17e3 100644 --- a/packages/dd-trace/test/datastreams/encoding.spec.js +++ b/packages/dd-trace/test/datastreams/encoding.spec.js @@ -1,5 +1,7 @@ 'use strict' +require('../setup/tap') + const { encodeVarint, decodeVarint } = require('../../src/datastreams/encoding') const { expect } = require('chai') diff --git a/packages/dd-trace/test/datastreams/pathway.spec.js b/packages/dd-trace/test/datastreams/pathway.spec.js index 7cf9746436d..0722f220f69 100644 --- a/packages/dd-trace/test/datastreams/pathway.spec.js +++ b/packages/dd-trace/test/datastreams/pathway.spec.js @@ -1,5 +1,7 @@ 'use strict' +require('../setup/tap') + const { expect } = require('chai') const { computePathwayHash, encodePathwayContext, decodePathwayContext } = require('../../src/datastreams/pathway') diff --git a/packages/dd-trace/test/datastreams/processor.spec.js b/packages/dd-trace/test/datastreams/processor.spec.js index 0d70d6941de..11425d039a1 100644 --- a/packages/dd-trace/test/datastreams/processor.spec.js +++ b/packages/dd-trace/test/datastreams/processor.spec.js @@ -21,7 +21,15 @@ const writer = { flush: sinon.stub() } const DataStreamsWriter = sinon.stub().returns(writer) -const { StatsPoint, StatsBucket, TimeBuckets, DataStreamsProcessor } = proxyquire('../src/datastreams/processor', { +const { + StatsPoint, + StatsBucket, + TimeBuckets, + DataStreamsProcessor, + getHeadersSize, + getMessageSize, + getSizeOrZero +} = proxyquire('../src/datastreams/processor', { './writer': { DataStreamsWriter } }) @@ -31,7 +39,8 @@ const mockCheckpoint = { parentHash: DEFAULT_PARENT_HASH, edgeTags: ['service:service-name', 'env:env-name', 'topic:test-topic'], edgeLatencyNs: DEFAULT_LATENCY, - pathwayLatencyNs: DEFAULT_LATENCY + pathwayLatencyNs: DEFAULT_LATENCY, + payloadSize: 100 } const anotherMockCheckpoint = { @@ -40,7 +49,8 @@ const anotherMockCheckpoint = { parentHash: ANOTHER_PARENT_HASH, edgeTags: ['service:service-name', 'env:env-name', 'topic:test-topic'], edgeLatencyNs: DEFAULT_LATENCY, - pathwayLatencyNs: DEFAULT_LATENCY + pathwayLatencyNs: DEFAULT_LATENCY, + payloadSize: 100 } describe('StatsPoint', () => { @@ -49,8 +59,10 @@ describe('StatsPoint', () => { aggStats.addLatencies(mockCheckpoint) const edgeLatency = new LogCollapsingLowestDenseDDSketch(HIGH_ACCURACY_DISTRIBUTION) const pathwayLatency = new LogCollapsingLowestDenseDDSketch(HIGH_ACCURACY_DISTRIBUTION) + const payloadSize = new LogCollapsingLowestDenseDDSketch(HIGH_ACCURACY_DISTRIBUTION) edgeLatency.accept(DEFAULT_LATENCY / 1e9) pathwayLatency.accept(DEFAULT_LATENCY / 1e9) + payloadSize.accept(100) const encoded = aggStats.encode() expect(encoded.Hash.toString()).to.equal(new Uint64(DEFAULT_CURRENT_HASH).toString()) @@ -58,6 +70,7 @@ describe('StatsPoint', () => { expect(encoded.EdgeTags).to.deep.equal(aggStats.edgeTags) expect(encoded.EdgeLatency).to.deep.equal(edgeLatency.toProto()) expect(encoded.PathwayLatency).to.deep.equal(pathwayLatency.toProto()) + expect(encoded.PayloadSize).to.deep.equal(payloadSize.toProto()) }) }) @@ -102,6 +115,7 @@ 
describe('DataStreamsProcessor', () => { let edgeLatency let pathwayLatency let processor + let payloadSize const config = { dsmEnabled: true, @@ -145,8 +159,10 @@ describe('DataStreamsProcessor', () => { edgeLatency = new LogCollapsingLowestDenseDDSketch(0.00775) pathwayLatency = new LogCollapsingLowestDenseDDSketch(0.00775) + payloadSize = new LogCollapsingLowestDenseDDSketch(0.00775) edgeLatency.accept(mockCheckpoint.edgeLatencyNs / 1e9) pathwayLatency.accept(mockCheckpoint.pathwayLatencyNs / 1e9) + payloadSize.accept(mockCheckpoint.payloadSize) const encoded = checkpointBucket.encode() expect(encoded.Hash.toString()).to.equal(new Uint64(DEFAULT_CURRENT_HASH).toString()) @@ -154,6 +170,7 @@ describe('DataStreamsProcessor', () => { expect(encoded.EdgeTags).to.deep.equal(mockCheckpoint.edgeTags) expect(encoded.EdgeLatency).to.deep.equal(edgeLatency.toProto()) expect(encoded.PathwayLatency).to.deep.equal(pathwayLatency.toProto()) + expect(encoded.PayloadSize).to.deep.equal(payloadSize.toProto()) }) it('should export on interval', () => { @@ -170,7 +187,8 @@ describe('DataStreamsProcessor', () => { ParentHash: new Uint64(DEFAULT_PARENT_HASH), EdgeTags: mockCheckpoint.edgeTags, EdgeLatency: edgeLatency.toProto(), - PathwayLatency: pathwayLatency.toProto() + PathwayLatency: pathwayLatency.toProto(), + PayloadSize: payloadSize.toProto() }] }], TracerVersion: pkg.version, @@ -178,3 +196,53 @@ describe('DataStreamsProcessor', () => { }) }) }) + +describe('getSizeOrZero', () => { + it('should return the size of a string', () => { + expect(getSizeOrZero('hello')).to.equal(5) + }) + + it('should handle unicode characters', () => { + // emoji is 4 bytes + expect(getSizeOrZero('hello 😀')).to.equal(10) + }) + + it('should return the size of an ArrayBuffer', () => { + const buffer = new ArrayBuffer(10) + expect(getSizeOrZero(buffer)).to.equal(10) + }) + + it('should return the size of a Buffer', () => { + const buffer = Buffer.from('hello', 'utf-8') + expect(getSizeOrZero(buffer)).to.equal(5) + }) +}) + +describe('getHeadersSize', () => { + it('should return 0 for undefined/empty headers', () => { + expect(getHeadersSize(undefined)).to.equal(0) + expect(getHeadersSize({})).to.equal(0) + }) + + it('should return the total size of all headers', () => { + const headers = { + 'Content-Type': 'application/json', + 'Content-Length': '100' + } + expect(getHeadersSize(headers)).to.equal(45) + }) +}) + +describe('getMessageSize', () => { + it('should return the size of a message', () => { + const message = { + key: 'key', + value: 'value', + headers: { + 'Content-Type': 'application/json', + 'Content-Length': '100' + } + } + expect(getMessageSize(message)).to.equal(53) + }) +}) From c299c771ee31a159e5206ce5fd0df228e57082e8 Mon Sep 17 00:00:00 2001 From: Thomas Hunter II Date: Mon, 6 Nov 2023 01:14:41 -0800 Subject: [PATCH 052/147] use dc-polyfill instead of diagnostics_channel directly (#3722) * use dc-polyfill instead of diagnostics_channel - makes ESBuild plugin compatible with Node.js >= 12.17.0 * yarn.lock * Update packages/dd-trace/src/appsec/iast/path-line.js Co-authored-by: Igor Unanua * Update packages/dd-trace/src/appsec/iast/path-line.js Co-authored-by: Igor Unanua * Update packages/dd-trace/test/appsec/iast/path-line.spec.js Co-authored-by: Igor Unanua --------- Co-authored-by: Igor Unanua --- LICENSE-3rdparty.csv | 2 +- README.md | 6 +- .../test-api-manual/test.fake.js | 2 +- package.json | 2 +- .../src/storage/async_resource.js | 2 +- packages/datadog-esbuild/index.js | 21 +-- 
.../src/helpers/bundler-register.js | 3 +- .../src/helpers/instrument.js | 2 +- .../src/helpers/register.js | 2 +- .../test/body-parser.spec.js | 2 +- .../test/cookie-parser.spec.js | 2 +- .../test/express-mongo-sanitize.spec.js | 2 +- .../test/express.spec.js | 2 +- .../test/passport-http.spec.js | 2 +- .../test/passport-local.spec.js | 2 +- packages/datadog-plugin-fs/test/index.spec.js | 2 +- .../datadog-plugin-http/test/client.spec.js | 2 +- packages/dd-trace/src/appsec/channels.js | 2 +- packages/dd-trace/src/appsec/iast/iast-log.js | 2 +- .../dd-trace/src/appsec/iast/iast-plugin.js | 2 +- packages/dd-trace/src/appsec/iast/index.js | 2 +- .../dd-trace/src/appsec/iast/path-line.js | 2 +- .../appsec/iast/taint-tracking/rewriter.js | 2 +- packages/dd-trace/src/iitm.js | 2 +- packages/dd-trace/src/log/channels.js | 2 +- packages/dd-trace/src/plugin_manager.js | 2 +- packages/dd-trace/src/plugins/plugin.js | 2 +- .../dd-trace/src/profiling/profilers/wall.js | 2 +- packages/dd-trace/src/ritm.js | 2 +- .../dd-trace/src/telemetry/dependencies.js | 2 +- packages/dd-trace/src/telemetry/index.js | 2 +- packages/dd-trace/src/telemetry/logs/index.js | 2 +- .../set-cookies-header-interceptor.spec.js | 2 +- .../analyzers/sql-injection-analyzer.spec.js | 2 +- .../analyzers/vulnerability-analyzer.spec.js | 2 +- .../test/appsec/iast/iast-log.spec.js | 2 +- .../test/appsec/iast/iast-plugin.spec.js | 2 +- .../test/appsec/iast/path-line.spec.js | 2 +- .../appsec/iast/taint-tracking/plugin.spec.js | 2 +- packages/dd-trace/test/iitm.spec.js | 2 +- packages/dd-trace/test/plugin_manager.spec.js | 2 +- .../dd-trace/test/plugins/log_plugin.spec.js | 2 +- .../dd-trace/test/plugins/tracing.spec.js | 2 +- packages/dd-trace/test/ritm.spec.js | 2 +- .../test/telemetry/dependencies.spec.js | 4 +- .../test/telemetry/logs/index.spec.js | 10 +- packages/diagnostics_channel/index.js | 3 - packages/diagnostics_channel/src/index.js | 121 ------------------ yarn.lock | 10 +- 49 files changed, 58 insertions(+), 202 deletions(-) delete mode 100644 packages/diagnostics_channel/index.js delete mode 100644 packages/diagnostics_channel/src/index.js diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv index d1aa41b234b..117211f0034 100644 --- a/LICENSE-3rdparty.csv +++ b/LICENSE-3rdparty.csv @@ -8,7 +8,7 @@ require,@datadog/sketches-js,Apache license 2.0,Copyright 2020 Datadog Inc. require,@opentelemetry/api,Apache license 2.0,Copyright OpenTelemetry Authors require,@opentelemetry/core,Apache license 2.0,Copyright OpenTelemetry Authors require,crypto-randomuuid,MIT,Copyright 2021 Node.js Foundation and contributors -require,diagnostics_channel,MIT,Copyright 2021 Simon D. +require,dc-polyfill,MIT,Copyright 2023 Datadog Inc. require,ignore,MIT,Copyright 2013 Kael Zhang and contributors require,import-in-the-middle,Apache license 2.0,Copyright 2021 Datadog Inc. require,int64-buffer,MIT,Copyright 2015-2016 Yusuke Kawasaki diff --git a/README.md b/README.md index 6c29011eab9..8a3d9372125 100644 --- a/README.md +++ b/README.md @@ -202,11 +202,11 @@ To get around this, one can treat all third party modules, or at least third par For these reasons it's necessary to have custom-built bundler plugins. Such plugins are able to instruct the bundler on how to behave, injecting intermediary code and otherwise intercepting the "translated" `require()` calls. The result is that many more packages are then included in the bundled JavaScript file. 
Some applications can have 100% of modules bundled, however native modules still need to remain external to the bundle. -### Esbuild Support +### ESBuild Support -This library provides experimental esbuild support in the form of an esbuild plugin, and currently requires at least Node.js v16.17 or v18.7. To use the plugin, make sure you have `dd-trace@3+` installed, and then require the `dd-trace/esbuild` module when building your bundle. +This library provides experimental ESBuild support in the form of an ESBuild plugin. Require the `dd-trace/esbuild` module when building your bundle to enable the plugin. -Here's an example of how one might use `dd-trace` with esbuild: +Here's an example of how one might use `dd-trace` with ESBuild: ```javascript const ddPlugin = require('dd-trace/esbuild') diff --git a/integration-tests/ci-visibility/test-api-manual/test.fake.js b/integration-tests/ci-visibility/test-api-manual/test.fake.js index bc1f17972b7..11f35dd8e87 100644 --- a/integration-tests/ci-visibility/test-api-manual/test.fake.js +++ b/integration-tests/ci-visibility/test-api-manual/test.fake.js @@ -1,5 +1,5 @@ /* eslint-disable */ -const { channel } = require('diagnostics_channel') +const { channel } = require('dc-polyfill') const tracer = require('dd-trace') const testStartCh = channel('dd-trace:ci:manual:test:start') diff --git a/package.json b/package.json index 2f11b62aa05..21b0db70618 100644 --- a/package.json +++ b/package.json @@ -77,7 +77,7 @@ "@opentelemetry/api": "^1.0.0", "@opentelemetry/core": "^1.14.0", "crypto-randomuuid": "^1.0.0", - "diagnostics_channel": "^1.1.0", + "dc-polyfill": "^0.1.2", "ignore": "^5.2.4", "import-in-the-middle": "^1.4.2", "int64-buffer": "^0.1.9", diff --git a/packages/datadog-core/src/storage/async_resource.js b/packages/datadog-core/src/storage/async_resource.js index 6dea8cf2fec..4738845e415 100644 --- a/packages/datadog-core/src/storage/async_resource.js +++ b/packages/datadog-core/src/storage/async_resource.js @@ -1,7 +1,7 @@ 'use strict' const { createHook, executionAsyncResource } = require('async_hooks') -const { channel } = require('../../../diagnostics_channel') +const { channel } = require('dc-polyfill') const beforeCh = channel('dd-trace:storage:before') const afterCh = channel('dd-trace:storage:after') diff --git a/packages/datadog-esbuild/index.js b/packages/datadog-esbuild/index.js index 94e4bed04bb..84454213e7d 100644 --- a/packages/datadog-esbuild/index.js +++ b/packages/datadog-esbuild/index.js @@ -5,8 +5,6 @@ const instrumentations = require('../datadog-instrumentations/src/helpers/instrumentations.js') const hooks = require('../datadog-instrumentations/src/helpers/hooks.js') -warnIfUnsupported() - for (const hook of Object.values(hooks)) { hook() } @@ -144,7 +142,7 @@ module.exports.setup = function (build) { ${fileCode} })(...arguments); { - const dc = require('diagnostics_channel'); + const dc = require('dc-polyfill'); const ch = dc.channel('${CHANNEL}'); const mod = module.exports const payload = { @@ -167,23 +165,6 @@ module.exports.setup = function (build) { }) } -// Currently esbuild support requires Node.js >=v16.17 or >=v18.7 -// Better yet it would support Node >=v14.17 or >=v16 -// Of course, the most ideal would be to support all versions of Node that dd-trace supports. 
-// Version constraints based on Node's diagnostics_channel support -function warnIfUnsupported () { - const [major, minor] = process.versions.node.split('.').map(Number) - if ( - major < 16 || - (major === 16 && minor < 17) || - (major === 18 && minor < 7)) { - console.error('WARNING: Esbuild support isn\'t available for older versions of Node.js.') - console.error(`Expected: Node.js >=v16.17 or >=v18.7. Actual: Node.js = ${process.version}.`) - console.error('This application may build properly with this version of Node.js, but unless a') - console.error('more recent version is used at runtime, third party packages won\'t be instrumented.') - } -} - // @see https://github.com/nodejs/node/issues/47000 function dotFriendlyResolve (path, directory) { if (path === '.') { diff --git a/packages/datadog-instrumentations/src/helpers/bundler-register.js b/packages/datadog-instrumentations/src/helpers/bundler-register.js index 75a0ce7aeb6..a5dfead9669 100644 --- a/packages/datadog-instrumentations/src/helpers/bundler-register.js +++ b/packages/datadog-instrumentations/src/helpers/bundler-register.js @@ -1,7 +1,6 @@ 'use strict' -// eslint-disable-next-line n/no-restricted-require -const dc = require('diagnostics_channel') +const dc = require('dc-polyfill') const { filename, diff --git a/packages/datadog-instrumentations/src/helpers/instrument.js b/packages/datadog-instrumentations/src/helpers/instrument.js index 51ab92fc629..323c6b01624 100644 --- a/packages/datadog-instrumentations/src/helpers/instrument.js +++ b/packages/datadog-instrumentations/src/helpers/instrument.js @@ -1,6 +1,6 @@ 'use strict' -const dc = require('../../../diagnostics_channel') +const dc = require('dc-polyfill') const semver = require('semver') const instrumentations = require('./instrumentations') const { AsyncResource } = require('async_hooks') diff --git a/packages/datadog-instrumentations/src/helpers/register.js b/packages/datadog-instrumentations/src/helpers/register.js index 910b945ef8a..e89a91b55f2 100644 --- a/packages/datadog-instrumentations/src/helpers/register.js +++ b/packages/datadog-instrumentations/src/helpers/register.js @@ -1,6 +1,6 @@ 'use strict' -const { channel } = require('../../../diagnostics_channel') +const { channel } = require('dc-polyfill') const path = require('path') const semver = require('semver') const Hook = require('./hook') diff --git a/packages/datadog-instrumentations/test/body-parser.spec.js b/packages/datadog-instrumentations/test/body-parser.spec.js index 43f2d7a356c..d502bc00ea6 100644 --- a/packages/datadog-instrumentations/test/body-parser.spec.js +++ b/packages/datadog-instrumentations/test/body-parser.spec.js @@ -1,7 +1,7 @@ 'use strict' const getPort = require('get-port') -const dc = require('../../diagnostics_channel') +const dc = require('dc-polyfill') const axios = require('axios') const agent = require('../../dd-trace/test/plugins/agent') diff --git a/packages/datadog-instrumentations/test/cookie-parser.spec.js b/packages/datadog-instrumentations/test/cookie-parser.spec.js index 2eb9da4e11b..4137ddbef63 100644 --- a/packages/datadog-instrumentations/test/cookie-parser.spec.js +++ b/packages/datadog-instrumentations/test/cookie-parser.spec.js @@ -2,7 +2,7 @@ const { assert } = require('chai') const getPort = require('get-port') -const dc = require('../../diagnostics_channel') +const dc = require('dc-polyfill') const axios = require('axios') const agent = require('../../dd-trace/test/plugins/agent') diff --git 
a/packages/datadog-instrumentations/test/express-mongo-sanitize.spec.js b/packages/datadog-instrumentations/test/express-mongo-sanitize.spec.js index 672ea492747..d9a314d5abc 100644 --- a/packages/datadog-instrumentations/test/express-mongo-sanitize.spec.js +++ b/packages/datadog-instrumentations/test/express-mongo-sanitize.spec.js @@ -2,7 +2,7 @@ const agent = require('../../dd-trace/test/plugins/agent') const getPort = require('get-port') -const { channel } = require('../../diagnostics_channel') +const { channel } = require('dc-polyfill') const axios = require('axios') describe('express-mongo-sanitize', () => { withVersions('express-mongo-sanitize', 'express-mongo-sanitize', version => { diff --git a/packages/datadog-instrumentations/test/express.spec.js b/packages/datadog-instrumentations/test/express.spec.js index 00c43f2f042..88f75164be6 100644 --- a/packages/datadog-instrumentations/test/express.spec.js +++ b/packages/datadog-instrumentations/test/express.spec.js @@ -3,7 +3,7 @@ const agent = require('../../dd-trace/test/plugins/agent') const getPort = require('get-port') const axios = require('axios') -const dc = require('../../diagnostics_channel') +const dc = require('dc-polyfill') withVersions('express', 'express', version => { describe('express query instrumentation', () => { diff --git a/packages/datadog-instrumentations/test/passport-http.spec.js b/packages/datadog-instrumentations/test/passport-http.spec.js index e772a6680b3..e9906f7de0e 100644 --- a/packages/datadog-instrumentations/test/passport-http.spec.js +++ b/packages/datadog-instrumentations/test/passport-http.spec.js @@ -3,7 +3,7 @@ const agent = require('../../dd-trace/test/plugins/agent') const getPort = require('get-port') const axios = require('axios') -const dc = require('../../diagnostics_channel') +const dc = require('dc-polyfill') withVersions('passport-http', 'passport-http', version => { describe('passport-http instrumentation', () => { diff --git a/packages/datadog-instrumentations/test/passport-local.spec.js b/packages/datadog-instrumentations/test/passport-local.spec.js index 78c9521ab20..c7a43c50c2e 100644 --- a/packages/datadog-instrumentations/test/passport-local.spec.js +++ b/packages/datadog-instrumentations/test/passport-local.spec.js @@ -3,7 +3,7 @@ const agent = require('../../dd-trace/test/plugins/agent') const getPort = require('get-port') const axios = require('axios') -const dc = require('../../diagnostics_channel') +const dc = require('dc-polyfill') withVersions('passport-local', 'passport-local', version => { describe('passport-local instrumentation', () => { diff --git a/packages/datadog-plugin-fs/test/index.spec.js b/packages/datadog-plugin-fs/test/index.spec.js index 0e242adcdf2..da1310010c9 100644 --- a/packages/datadog-plugin-fs/test/index.spec.js +++ b/packages/datadog-plugin-fs/test/index.spec.js @@ -10,7 +10,7 @@ const semver = require('semver') const rimraf = require('rimraf') const util = require('util') const plugins = require('../../dd-trace/src/plugins') -const { channel } = require('../../diagnostics_channel') +const { channel } = require('dc-polyfill') const hasWritev = semver.satisfies(process.versions.node, '>=12.9.0') const hasOSymlink = realFS.constants.O_SYMLINK diff --git a/packages/datadog-plugin-http/test/client.spec.js b/packages/datadog-plugin-http/test/client.spec.js index 73aba7d205e..ca2a89cf67d 100644 --- a/packages/datadog-plugin-http/test/client.spec.js +++ b/packages/datadog-plugin-http/test/client.spec.js @@ -971,7 +971,7 @@ describe('Plugin', () => { 
beforeEach(() => { return agent.load('http', { server: false }) .then(() => { - ch = require('../../diagnostics_channel').channel('apm:http:client:request:start') + ch = require('dc-polyfill').channel('apm:http:client:request:start') sub = () => {} tracer = require('../../dd-trace') http = require(protocol) diff --git a/packages/dd-trace/src/appsec/channels.js b/packages/dd-trace/src/appsec/channels.js index 26d3dbb0355..cf31b12d233 100644 --- a/packages/dd-trace/src/appsec/channels.js +++ b/packages/dd-trace/src/appsec/channels.js @@ -1,6 +1,6 @@ 'use strict' -const dc = require('../../../diagnostics_channel') +const dc = require('dc-polyfill') // TODO: use TBD naming convention module.exports = { diff --git a/packages/dd-trace/src/appsec/iast/iast-log.js b/packages/dd-trace/src/appsec/iast/iast-log.js index 36b10f6fce5..25b33c3bf44 100644 --- a/packages/dd-trace/src/appsec/iast/iast-log.js +++ b/packages/dd-trace/src/appsec/iast/iast-log.js @@ -1,6 +1,6 @@ 'use strict' -const dc = require('../../../../diagnostics_channel') +const dc = require('dc-polyfill') const log = require('../../log') const { calculateDDBasePath } = require('../../util') diff --git a/packages/dd-trace/src/appsec/iast/iast-plugin.js b/packages/dd-trace/src/appsec/iast/iast-plugin.js index 6c8d2c40535..2fe9f85bed6 100644 --- a/packages/dd-trace/src/appsec/iast/iast-plugin.js +++ b/packages/dd-trace/src/appsec/iast/iast-plugin.js @@ -1,6 +1,6 @@ 'use strict' -const { channel } = require('../../../../diagnostics_channel') +const { channel } = require('dc-polyfill') const iastLog = require('./iast-log') const Plugin = require('../../plugins/plugin') diff --git a/packages/dd-trace/src/appsec/iast/index.js b/packages/dd-trace/src/appsec/iast/index.js index 1abfcadac62..494c56c55a1 100644 --- a/packages/dd-trace/src/appsec/iast/index.js +++ b/packages/dd-trace/src/appsec/iast/index.js @@ -3,7 +3,7 @@ const { enableAllAnalyzers, disableAllAnalyzers } = require('./analyzers') const web = require('../../plugins/util/web') const { storage } = require('../../../../datadog-core') const overheadController = require('./overhead-controller') -const dc = require('../../../../diagnostics_channel') +const dc = require('dc-polyfill') const iastContextFunctions = require('./iast-context') const { enableTaintTracking, diff --git a/packages/dd-trace/src/appsec/iast/path-line.js b/packages/dd-trace/src/appsec/iast/path-line.js index 8a262e04976..11328ecdf15 100644 --- a/packages/dd-trace/src/appsec/iast/path-line.js +++ b/packages/dd-trace/src/appsec/iast/path-line.js @@ -13,7 +13,7 @@ const pathLine = { } const EXCLUDED_PATHS = [ - path.join(path.sep, 'node_modules', 'diagnostics_channel') + path.join(path.sep, 'node_modules', 'dc-polyfill') ] const EXCLUDED_PATH_PREFIXES = [ 'node:diagnostics_channel', diff --git a/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter.js b/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter.js index dda4a65e122..71f6265b81d 100644 --- a/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter.js +++ b/packages/dd-trace/src/appsec/iast/taint-tracking/rewriter.js @@ -7,7 +7,7 @@ const { isPrivateModule, isNotLibraryFile } = require('./filter') const { csiMethods } = require('./csi-methods') const { getName } = require('../telemetry/verbosity') const { getRewriteFunction } = require('./rewriter-telemetry') -const dc = require('../../../../../diagnostics_channel') +const dc = require('dc-polyfill') const hardcodedSecretCh = dc.channel('datadog:secrets:result') let rewriter diff --git 
a/packages/dd-trace/src/iitm.js b/packages/dd-trace/src/iitm.js index 94014d566a7..86a8d4dcecd 100644 --- a/packages/dd-trace/src/iitm.js +++ b/packages/dd-trace/src/iitm.js @@ -3,7 +3,7 @@ const semver = require('semver') const logger = require('./log') const { addHook } = require('import-in-the-middle') -const dc = require('../../diagnostics_channel') +const dc = require('dc-polyfill') if (semver.satisfies(process.versions.node, '>=14.13.1')) { const moduleLoadStartChannel = dc.channel('dd-trace:moduleLoadStart') diff --git a/packages/dd-trace/src/log/channels.js b/packages/dd-trace/src/log/channels.js index 0921f7f05a0..0bf84871b34 100644 --- a/packages/dd-trace/src/log/channels.js +++ b/packages/dd-trace/src/log/channels.js @@ -1,6 +1,6 @@ 'use strict' -const { channel } = require('../../../diagnostics_channel') +const { channel } = require('dc-polyfill') const Level = { Debug: 'debug', diff --git a/packages/dd-trace/src/plugin_manager.js b/packages/dd-trace/src/plugin_manager.js index f24c5a8033d..b7141a166d3 100644 --- a/packages/dd-trace/src/plugin_manager.js +++ b/packages/dd-trace/src/plugin_manager.js @@ -1,6 +1,6 @@ 'use strict' -const { channel } = require('../../diagnostics_channel') +const { channel } = require('dc-polyfill') const { isFalse } = require('./util') const plugins = require('./plugins') const log = require('./log') diff --git a/packages/dd-trace/src/plugins/plugin.js b/packages/dd-trace/src/plugins/plugin.js index f56b2d0c5b6..7bc5562bbcc 100644 --- a/packages/dd-trace/src/plugins/plugin.js +++ b/packages/dd-trace/src/plugins/plugin.js @@ -2,7 +2,7 @@ // TODO: move anything related to tracing to TracingPlugin instead -const dc = require('../../../diagnostics_channel') +const dc = require('dc-polyfill') const { storage } = require('../../../datadog-core') class Subscription { diff --git a/packages/dd-trace/src/profiling/profilers/wall.js b/packages/dd-trace/src/profiling/profilers/wall.js index ff4cf7d1570..5c6f01555e7 100644 --- a/packages/dd-trace/src/profiling/profilers/wall.js +++ b/packages/dd-trace/src/profiling/profilers/wall.js @@ -2,7 +2,7 @@ const { storage } = require('../../../../datadog-core') -const dc = require('../../../../diagnostics_channel') +const dc = require('dc-polyfill') const { HTTP_METHOD, HTTP_ROUTE, RESOURCE_NAME, SPAN_TYPE } = require('../../../../../ext/tags') const { WEB } = require('../../../../../ext/types') const runtimeMetrics = require('../../runtime_metrics') diff --git a/packages/dd-trace/src/ritm.js b/packages/dd-trace/src/ritm.js index 2385b3f3f4a..509e9ad732e 100644 --- a/packages/dd-trace/src/ritm.js +++ b/packages/dd-trace/src/ritm.js @@ -3,7 +3,7 @@ const path = require('path') const Module = require('module') const parse = require('module-details-from-path') -const dc = require('../../diagnostics_channel') +const dc = require('dc-polyfill') const origRequire = Module.prototype.require diff --git a/packages/dd-trace/src/telemetry/dependencies.js b/packages/dd-trace/src/telemetry/dependencies.js index 11c443fee98..6d502a748f3 100644 --- a/packages/dd-trace/src/telemetry/dependencies.js +++ b/packages/dd-trace/src/telemetry/dependencies.js @@ -4,7 +4,7 @@ const path = require('path') const parse = require('module-details-from-path') const requirePackageJson = require('../require-package-json') const { sendData } = require('./send-data') -const dc = require('../../../diagnostics_channel') +const dc = require('dc-polyfill') const { fileURLToPath } = require('url') const savedDependenciesToSend = new Set() diff --git 
a/packages/dd-trace/src/telemetry/index.js b/packages/dd-trace/src/telemetry/index.js index b3de33bbc18..7b3ee094787 100644 --- a/packages/dd-trace/src/telemetry/index.js +++ b/packages/dd-trace/src/telemetry/index.js @@ -1,7 +1,7 @@ 'use strict' const tracerVersion = require('../../../../package.json').version -const dc = require('../../../diagnostics_channel') +const dc = require('dc-polyfill') const os = require('os') const dependencies = require('./dependencies') const { sendData } = require('./send-data') diff --git a/packages/dd-trace/src/telemetry/logs/index.js b/packages/dd-trace/src/telemetry/logs/index.js index 4bc7a6b4a24..4584061613e 100644 --- a/packages/dd-trace/src/telemetry/logs/index.js +++ b/packages/dd-trace/src/telemetry/logs/index.js @@ -1,6 +1,6 @@ 'use strict' -const dc = require('../../../../diagnostics_channel') +const dc = require('dc-polyfill') const logCollector = require('./log-collector') const { sendData } = require('../send-data') diff --git a/packages/dd-trace/test/appsec/iast/analyzers/set-cookies-header-interceptor.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/set-cookies-header-interceptor.spec.js index b1043f3243d..5cd707c5620 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/set-cookies-header-interceptor.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/set-cookies-header-interceptor.spec.js @@ -1,7 +1,7 @@ 'use strict' const setCookiesHeaderInterceptor = require('../../../../src/appsec/iast/analyzers/set-cookies-header-interceptor') -const dc = require('../../../../../diagnostics_channel') +const dc = require('dc-polyfill') const iastSetCookieChannel = dc.channel('datadog:iast:set-cookie') const setHeaderChannel = dc.channel('datadog:http:server:response:set-header:finish') diff --git a/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.spec.js index e73e96d78cf..23b40545401 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/sql-injection-analyzer.spec.js @@ -3,7 +3,7 @@ const proxyquire = require('proxyquire') const iastLog = require('../../../../src/appsec/iast/iast-log') -const dc = require('../../../../../diagnostics_channel') +const dc = require('dc-polyfill') describe('sql-injection-analyzer', () => { const NOT_TAINTED_QUERY = 'no vulnerable query' diff --git a/packages/dd-trace/test/appsec/iast/analyzers/vulnerability-analyzer.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/vulnerability-analyzer.spec.js index 22ee0a870cd..4d86990d887 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/vulnerability-analyzer.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/vulnerability-analyzer.spec.js @@ -161,7 +161,7 @@ describe('vulnerability-analyzer', () => { expect(vulnerabilityAnalyzer._subscriptions).to.have.lengthOf(1) vulnerabilityAnalyzer._subscriptions[0].enable() - const dc = require('../../../../../diagnostics_channel') + const dc = require('dc-polyfill') expect(() => { dc.channel('dd-trace:test:error:sub').publish({}) }).to.not.throw() }) diff --git a/packages/dd-trace/test/appsec/iast/iast-log.spec.js b/packages/dd-trace/test/appsec/iast/iast-log.spec.js index c6dba11a461..b04dbec0faa 100644 --- a/packages/dd-trace/test/appsec/iast/iast-log.spec.js +++ b/packages/dd-trace/test/appsec/iast/iast-log.spec.js @@ -24,7 +24,7 @@ describe('IAST log', () => { } iastLog = proxyquire('../../../src/appsec/iast/iast-log', { - 
'../../../../diagnostics_channel': { + 'dc-polyfill': { channel: () => telemetryLog }, '../../log': log diff --git a/packages/dd-trace/test/appsec/iast/iast-plugin.spec.js b/packages/dd-trace/test/appsec/iast/iast-plugin.spec.js index 26256d537ba..ca8a3381676 100644 --- a/packages/dd-trace/test/appsec/iast/iast-plugin.spec.js +++ b/packages/dd-trace/test/appsec/iast/iast-plugin.spec.js @@ -1,7 +1,7 @@ 'use strict' const { expect } = require('chai') -const { channel } = require('../../../../diagnostics_channel') +const { channel } = require('dc-polyfill') const proxyquire = require('proxyquire') const { getExecutedMetric, getInstrumentedMetric, TagKey } = require('../../../src/appsec/iast/telemetry/iast-metric') diff --git a/packages/dd-trace/test/appsec/iast/path-line.spec.js b/packages/dd-trace/test/appsec/iast/path-line.spec.js index 50e2e0a1569..6dd5f9a77d7 100644 --- a/packages/dd-trace/test/appsec/iast/path-line.spec.js +++ b/packages/dd-trace/test/appsec/iast/path-line.spec.js @@ -32,7 +32,7 @@ describe('path-line', function () { const rootPath = tmpdir.slice(0, firstSep + 1) const DIAGNOSTICS_CHANNEL_PATHS = [ - path.join(rootPath, 'path', 'node_modules', 'diagnostics_channel', 'index.js'), + path.join(rootPath, 'path', 'node_modules', 'dc-polyfill'), 'node:diagnostics_channel', 'diagnostics_channel' ] diff --git a/packages/dd-trace/test/appsec/iast/taint-tracking/plugin.spec.js b/packages/dd-trace/test/appsec/iast/taint-tracking/plugin.spec.js index 68964910c89..c8d49ee933e 100644 --- a/packages/dd-trace/test/appsec/iast/taint-tracking/plugin.spec.js +++ b/packages/dd-trace/test/appsec/iast/taint-tracking/plugin.spec.js @@ -3,7 +3,7 @@ const proxyquire = require('proxyquire') const iastContextFunctions = require('../../../../src/appsec/iast/iast-context') const taintTrackingOperations = require('../../../../src/appsec/iast/taint-tracking/operations') -const dc = require('../../../../../diagnostics_channel') +const dc = require('dc-polyfill') const { HTTP_REQUEST_COOKIE_VALUE, HTTP_REQUEST_COOKIE_NAME, diff --git a/packages/dd-trace/test/iitm.spec.js b/packages/dd-trace/test/iitm.spec.js index d18ac10a03a..cbcc4ed2ac0 100644 --- a/packages/dd-trace/test/iitm.spec.js +++ b/packages/dd-trace/test/iitm.spec.js @@ -4,7 +4,7 @@ require('./setup/tap') const { expect } = require('chai') const semver = require('semver') -const dc = require('../../diagnostics_channel') +const dc = require('dc-polyfill') describe('iitm.js', () => { let hookFn diff --git a/packages/dd-trace/test/plugin_manager.spec.js b/packages/dd-trace/test/plugin_manager.spec.js index 3e2dd98d8dd..62a5b0a1bc9 100644 --- a/packages/dd-trace/test/plugin_manager.spec.js +++ b/packages/dd-trace/test/plugin_manager.spec.js @@ -2,7 +2,7 @@ require('./setup/tap') -const { channel } = require('../../diagnostics_channel') +const { channel } = require('dc-polyfill') const proxyquire = require('proxyquire') const loadChannel = channel('dd-trace:instrumentation:load') diff --git a/packages/dd-trace/test/plugins/log_plugin.spec.js b/packages/dd-trace/test/plugins/log_plugin.spec.js index 4cc8d6aa9fe..0a9c1599167 100644 --- a/packages/dd-trace/test/plugins/log_plugin.spec.js +++ b/packages/dd-trace/test/plugins/log_plugin.spec.js @@ -6,7 +6,7 @@ const LogPlugin = require('../../src/plugins/log_plugin') const Tracer = require('../../src/tracer') const Config = require('../../src/config') -const { channel } = require('../../../diagnostics_channel') +const { channel } = require('dc-polyfill') const { expect } = require('chai') const 
testLogChannel = channel('apm:test:log') diff --git a/packages/dd-trace/test/plugins/tracing.spec.js b/packages/dd-trace/test/plugins/tracing.spec.js index 632582b751f..b8331f7915e 100644 --- a/packages/dd-trace/test/plugins/tracing.spec.js +++ b/packages/dd-trace/test/plugins/tracing.spec.js @@ -5,7 +5,7 @@ require('../setup/tap') const TracingPlugin = require('../../src/plugins/tracing') const agent = require('../plugins/agent') const plugins = require('../../src/plugins') -const { channel } = require('../../../diagnostics_channel') +const { channel } = require('dc-polyfill') describe('TracingPlugin', () => { describe('startSpan method', () => { diff --git a/packages/dd-trace/test/ritm.spec.js b/packages/dd-trace/test/ritm.spec.js index 212abc4758b..e05eeb32a50 100644 --- a/packages/dd-trace/test/ritm.spec.js +++ b/packages/dd-trace/test/ritm.spec.js @@ -2,7 +2,7 @@ require('./setup/tap') -const dc = require('../../diagnostics_channel') +const dc = require('dc-polyfill') const { assert } = require('chai') const Hook = require('../src/ritm') diff --git a/packages/dd-trace/test/telemetry/dependencies.spec.js b/packages/dd-trace/test/telemetry/dependencies.spec.js index 65c6b4e8962..971d8066503 100644 --- a/packages/dd-trace/test/telemetry/dependencies.spec.js +++ b/packages/dd-trace/test/telemetry/dependencies.spec.js @@ -4,7 +4,7 @@ require('../setup/tap') const proxyquire = require('proxyquire') const path = require('path') -const dc = require('../../../diagnostics_channel') +const dc = require('dc-polyfill') const moduleLoadStartChannel = dc.channel('dd-trace:moduleLoadStart') const originalSetImmediate = global.setImmediate describe('dependencies', () => { @@ -13,7 +13,7 @@ describe('dependencies', () => { const subscribe = sinon.stub() const dc = { channel () { return { subscribe } } } const dependencies = proxyquire('../../src/telemetry/dependencies', { - '../../../diagnostics_channel': dc + 'dc-polyfill': dc }) dependencies.start() expect(subscribe).to.have.been.calledOnce diff --git a/packages/dd-trace/test/telemetry/logs/index.spec.js b/packages/dd-trace/test/telemetry/logs/index.spec.js index 822543ba243..82a2d380122 100644 --- a/packages/dd-trace/test/telemetry/logs/index.spec.js +++ b/packages/dd-trace/test/telemetry/logs/index.spec.js @@ -35,7 +35,7 @@ describe('telemetry logs', () => { describe('start', () => { it('should be enabled by default and subscribe', () => { const logs = proxyquire('../../../src/telemetry/logs', { - '../../../../diagnostics_channel': dc + 'dc-polyfill': dc }) logs.start(defaultConfig) @@ -45,7 +45,7 @@ describe('telemetry logs', () => { it('should be subscribe only once', () => { const logs = proxyquire('../../../src/telemetry/logs', { - '../../../../diagnostics_channel': dc + 'dc-polyfill': dc }) logs.start(defaultConfig) @@ -57,7 +57,7 @@ describe('telemetry logs', () => { it('should be disabled and not subscribe if DD_TELEMETRY_LOG_COLLECTION_ENABLED = false', () => { const logs = proxyquire('../../../src/telemetry/logs', { - '../../../../diagnostics_channel': dc + 'dc-polyfill': dc }) defaultConfig.telemetry.logCollection = false @@ -70,7 +70,7 @@ describe('telemetry logs', () => { describe('stop', () => { it('should unsubscribe configured listeners', () => { const logs = proxyquire('../../../src/telemetry/logs', { - '../../../../diagnostics_channel': dc + 'dc-polyfill': dc }) logs.start(defaultConfig) @@ -81,7 +81,7 @@ describe('telemetry logs', () => { }) describe('logCollector add', () => { - const dc = 
require('../../../../diagnostics_channel') + const dc = require('dc-polyfill') let logCollectorAdd let telemetryLog diff --git a/packages/diagnostics_channel/index.js b/packages/diagnostics_channel/index.js deleted file mode 100644 index ae91ce34dc5..00000000000 --- a/packages/diagnostics_channel/index.js +++ /dev/null @@ -1,3 +0,0 @@ -'use strict' - -module.exports = require('./src') diff --git a/packages/diagnostics_channel/src/index.js b/packages/diagnostics_channel/src/index.js deleted file mode 100644 index 6ca17ac258d..00000000000 --- a/packages/diagnostics_channel/src/index.js +++ /dev/null @@ -1,121 +0,0 @@ -'use strict' - -const { - Channel, - channel -} = require('diagnostics_channel') // eslint-disable-line n/no-restricted-require - -const [major, minor] = process.versions.node.split('.') -const channels = new WeakSet() - -// Our own DC with a limited subset of functionality stable across Node versions. -// TODO: Move the rest of the polyfill here. -// TODO: Switch to using global subscribe/unsubscribe/hasSubscribers. -const dc = { channel } - -// Prevent going to 0 subscribers to avoid bug in Node. -// See https://github.com/nodejs/node/pull/47520 -if (major === '19' && minor === '9') { - dc.channel = function () { - const ch = channel.apply(this, arguments) - - if (!channels.has(ch)) { - const subscribe = ch.subscribe - const unsubscribe = ch.unsubscribe - - ch.subscribe = function () { - delete ch.subscribe - delete ch.unsubscribe - - const result = subscribe.apply(this, arguments) - - this.subscribe(() => {}) // Keep it active forever. - - return result - } - - if (ch.unsubscribe === Channel.prototype.unsubscribe) { - // Needed because another subscriber could have subscribed to something - // that we unsubscribe to before the library is loaded. - ch.unsubscribe = function () { - delete ch.subscribe - delete ch.unsubscribe - - this.subscribe(() => {}) // Keep it active forever. 
- - return unsubscribe.apply(this, arguments) - } - } - - channels.add(ch) - } - - return ch - } -} - -if (!Channel.prototype.runStores) { - const ActiveChannelPrototype = getActiveChannelPrototype() - - Channel.prototype.bindStore = ActiveChannelPrototype.bindStore = function (store, transform) { - if (!this._stores) { - this._stores = new Map() - } - this._stores.set(store, transform) - } - - Channel.prototype.unbindStore = ActiveChannelPrototype.unbindStore = function (store) { - if (!this._stores) return - this._stores.delete(store) - } - - Channel.prototype.runStores = ActiveChannelPrototype.runStores = function (data, fn, thisArg, ...args) { - if (!this._stores) return Reflect.apply(fn, thisArg, args) - - let run = () => { - this.publish(data) - return Reflect.apply(fn, thisArg, args) - } - - for (const entry of this._stores.entries()) { - const store = entry[0] - const transform = entry[1] - run = wrapStoreRun(store, data, run, transform) - } - - return run() - } -} - -function defaultTransform (data) { - return data -} - -function wrapStoreRun (store, data, next, transform = defaultTransform) { - return () => { - let context - try { - context = transform(data) - } catch (err) { - process.nextTick(() => { - throw err - }) - return next() - } - - return store.run(context, next) - } -} - -function getActiveChannelPrototype () { - const dummyChannel = channel('foo') - const listener = () => {} - - dummyChannel.subscribe(listener) - const ActiveChannelPrototype = Object.getPrototypeOf(dummyChannel) - dummyChannel.unsubscribe(listener) - - return ActiveChannelPrototype -} - -module.exports = dc diff --git a/yarn.lock b/yarn.lock index ab11d83c275..cbcd5eaa37b 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1604,6 +1604,11 @@ csstype@^3.0.2: resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.1.tgz#841b532c45c758ee546a11d5bd7b7b473c8c30b9" integrity sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw== +dc-polyfill@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/dc-polyfill/-/dc-polyfill-0.1.2.tgz#99a2f120759317b9976999aa715183a1c44b1327" + integrity sha512-AJ4TWwkeOKF7+Wj301wdyK8L0D9SE8Fr7+eMein8UP8+Iyb1xuL3rXWXavsTEM1+vOqDLciYho4cpsvNY0RDGQ== + debug@2.6.9, debug@^2.6.9: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" @@ -1740,11 +1745,6 @@ detect-newline@^3.0.0: resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== -diagnostics_channel@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/diagnostics_channel/-/diagnostics_channel-1.1.0.tgz#bd66c49124ce3bac697dff57466464487f57cea5" - integrity sha512-OE1ngLDjSBPG6Tx0YATELzYzy3RKHC+7veQ8gLa8yS7AAgw65mFbVdcsu3501abqOZCEZqZyAIemB0zXlqDSuw== - diff@5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/diff/-/diff-5.0.0.tgz#7ed6ad76d859d030787ec35855f5b1daf31d852b" From 359e12453307087f7a312f1cc91205b1c93437fb Mon Sep 17 00:00:00 2001 From: simon-id Date: Wed, 8 Nov 2023 10:25:23 +0100 Subject: [PATCH 053/147] Update AppSec rules to 1.9.0 (#3772) --- packages/dd-trace/src/appsec/recommended.json | 320 +++++++++++++++--- 1 file changed, 272 insertions(+), 48 deletions(-) diff --git a/packages/dd-trace/src/appsec/recommended.json b/packages/dd-trace/src/appsec/recommended.json index eee31018ce5..fc316459b63 100644 
--- a/packages/dd-trace/src/appsec/recommended.json +++ b/packages/dd-trace/src/appsec/recommended.json @@ -1,7 +1,7 @@ { "version": "2.2", "metadata": { - "rules_version": "1.8.0" + "rules_version": "1.9.0" }, "rules": [ { @@ -3004,6 +3004,7 @@ ], "regex": "]*>[\\s\\S]*?", "options": { + "case_sensitive": false, "min_length": 8 } }, @@ -4207,7 +4208,6 @@ "name": "Remote Command Execution: Java process spawn (CVE-2017-9805)", "tags": { "type": "java_code_injection", - "crs_id": "944110", "category": "attack_attempt", "cwe": "94", "capec": "1000/152/242" @@ -4235,48 +4235,16 @@ "address": "graphql.server.all_resolvers" } ], - "regex": "(?:runtime|processbuilder)", + "regex": "(?:unmarshaller|base64data|java\\.).*(?:runtime|processbuilder)", "options": { - "case_sensitive": true, - "min_length": 7 - } - }, - "operator": "match_regex" - }, - { - "parameters": { - "inputs": [ - { - "address": "server.request.query" - }, - { - "address": "server.request.body" - }, - { - "address": "server.request.path_params" - }, - { - "address": "server.request.headers.no_cookies" - }, - { - "address": "grpc.server.request.message" - }, - { - "address": "graphql.server.all_resolvers" - } - ], - "regex": "(?:unmarshaller|base64data|java\\.)", - "options": { - "case_sensitive": true, - "min_length": 5 + "case_sensitive": false, + "min_length": 13 } }, "operator": "match_regex" } ], - "transformers": [ - "lowercase" - ] + "transformers": [] }, { "id": "crs-944-130", @@ -4479,6 +4447,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "server.request.headers.no_cookies" } ], "regex": "[#%$]{(?:[^}]+[^\\w\\s}\\-_][^}]+|\\d+-\\d+)}", @@ -4752,7 +4723,7 @@ "address": "graphql.server.all_resolvers" } ], - "regex": "\\bqualysperiscope\\.com\\b" + "regex": "\\bqualysperiscope\\.com\\b|\\.oscomm\\." 
}, "operator": "match_regex" } @@ -4833,7 +4804,7 @@ "address": "graphql.server.all_resolvers" } ], - "regex": "\\b(?:webhook\\.site|\\.canarytokens\\.com|vii\\.one|act1on3\\.ru|gdsburp\\.com)\\b" + "regex": "\\b(?:webhook\\.site|\\.canarytokens\\.com|vii\\.one|act1on3\\.ru|gdsburp\\.com|arcticwolf\\.net|oob\\.li|htbiw\\.com|h4\\.vc|mochan\\.cloud|imshopping\\.com|bootstrapnodejs\\.com|mooo-ng\\.com|securitytrails\\.com|canyouhackit\\.io|7bae\\.xyz)\\b" }, "operator": "match_regex" } @@ -4955,7 +4926,7 @@ "address": "graphql.server.all_resolvers" } ], - "regex": "\\b(?:interact\\.sh|oast\\.(?:pro|live|site|online|fun|me))\\b" + "regex": "\\b(?:interact\\.sh|oast\\.(?:pro|live|site|online|fun|me)|indusfacefinder\\.in|where\\.land|syhunt\\.net|tssrt\\.de|boardofcyber\\.io|assetnote-callback\\.com|praetorianlabs\\.dev|netspi\\.sh)\\b" }, "operator": "match_regex" } @@ -4996,7 +4967,187 @@ "address": "graphql.server.all_resolvers" } ], - "regex": "\\b(?:\\.|(?:\\\\|&#)(?:0*46|x0*2e);)r87(?:\\.|(?:\\\\|&#)(?:0*46|x0*2e);)(?:me|com)\\b", + "regex": "\\b(?:\\.|(?:\\\\|&#)(?:0*46|x0*2e);)?r87(?:\\.|(?:\\\\|&#)(?:0*46|x0*2e);)(?:me|com)\\b", + "options": { + "case_sensitive": false, + "min_length": 7 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "dog-913-009", + "name": "WhiteHat Security OOB domain", + "tags": { + "type": "commercial_scanner", + "category": "attack_attempt", + "tool_name": "WhiteHatSecurity", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + } + ], + "regex": "\\bwhsec(?:\\.|(?:\\\\|&#)(?:0*46|x0*2e);)us\\b", + "options": { + "case_sensitive": false, + "min_length": 8 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "dog-913-010", + "name": "Nessus OOB domain", + "tags": { + "type": "commercial_scanner", + "category": "attack_attempt", + "tool_name": "Nessus", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + } + ], + "regex": "\\b\\.nessus\\.org\\b", + "options": { + "case_sensitive": false, + "min_length": 8 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, + { + "id": "dog-913-011", + "name": "Watchtowr OOB domain", + "tags": { + "type": "commercial_scanner", + "category": "attack_attempt", + "tool_name": "Watchtowr", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + } + ], + "regex": "\\bwatchtowr\\.com\\b", + "options": { + "case_sensitive": false, + "min_length": 8 + } + }, + "operator": 
"match_regex" + } + ], + "transformers": [] + }, + { + "id": "dog-913-012", + "name": "AppCheck NG OOB domain", + "tags": { + "type": "commercial_scanner", + "category": "attack_attempt", + "tool_name": "AppCheckNG", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + } + ], + "regex": "\\bptst\\.io\\b", "options": { "case_sensitive": false, "min_length": 7 @@ -5048,6 +5199,50 @@ ], "transformers": [] }, + { + "id": "dog-932-100", + "name": "Shell spawn executing network command", + "tags": { + "type": "command_injection", + "category": "attack_attempt", + "cwe": "77", + "capec": "1000/152/248/88", + "confidence": "0" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.query" + }, + { + "address": "server.request.body" + }, + { + "address": "server.request.path_params" + }, + { + "address": "server.request.headers.no_cookies" + }, + { + "address": "grpc.server.request.message" + }, + { + "address": "graphql.server.all_resolvers" + } + ], + "regex": "(?:(?:['\"\\x60({|;&]|(?:^|['\"\\x60({|;&])(?:cmd(?:\\.exe)?\\s+(?:/\\w(?::\\w+)?\\s+)*))(?:ping|curl|wget|telnet)|\\bnslookup)[\\s,]", + "options": { + "case_sensitive": true, + "min_length": 5 + } + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, { "id": "dog-934-001", "name": "XXE - XML file loads external entity", @@ -5056,7 +5251,7 @@ "category": "attack_attempt", "cwe": "91", "capec": "1000/152/248/250", - "confidence": "0" + "confidence": "1" }, "conditions": [ { @@ -5091,7 +5286,7 @@ "category": "attack_attempt", "cwe": "83", "capec": "1000/152/242/63/591/243", - "confidence": "0" + "confidence": "1" }, "conditions": [ { @@ -5125,7 +5320,7 @@ "address": "graphql.server.all_resolvers" } ], - "regex": "<(?:iframe|esi:include)(?:(?:\\s|/)*\\w+=[\"'\\w]+)*(?:\\s|/)*src(?:doc)?=[\"']?(?:data:|javascript:|http:|//)[^\\s'\"]+['\"]?", + "regex": "<(?:iframe|esi:include)(?:(?:\\s|/)*\\w+=[\"'\\w]+)*(?:\\s|/)*src(?:doc)?=[\"']?(?:data:|javascript:|http:|dns:|//)[^\\s'\"]+['\"]?", "options": { "min_length": 14 } @@ -5171,7 +5366,7 @@ "address": "graphql.server.all_resolvers" } ], - "regex": "https?:\\/\\/(?:.*\\.)?(?:bxss\\.in|xss\\.ht|js\\.rip)", + "regex": "https?:\\/\\/(?:.*\\.)?(?:bxss\\.(?:in|me)|xss\\.ht|js\\.rip)", "options": { "case_sensitive": false } @@ -6110,7 +6305,7 @@ "address": "graphql.server.all_resolvers" } ], - "regex": "(http|https):\\/\\/(?:.*\\.)?(?:burpcollaborator\\.net|localtest\\.me|mail\\.ebc\\.apple\\.com|bugbounty\\.dod\\.network|.*\\.[nx]ip\\.io|oastify\\.com|oast\\.(?:pro|live|site|online|fun|me)|sslip\\.io|requestbin\\.com|requestbin\\.net|hookbin\\.com|webhook\\.site|canarytokens\\.com|interact\\.sh|ngrok\\.io|bugbounty\\.click|prbly\\.win|qualysperiscope\\.com|vii.one|act1on3.ru)" + "regex": "(http|https):\\/\\/(?:.*\\.)?(?:burpcollaborator\\.net|localtest\\.me|mail\\.ebc\\.apple\\.com|bugbounty\\.dod\\.network|.*\\.[nx]ip\\.io|oastify\\.com|oast\\.(?:pro|live|site|online|fun|me)|sslip\\.io|requestbin\\.com|requestbin\\.net|hookbin\\.com|webhook\\.site|canarytokens\\.com|interact\\.sh|ngrok\\.io|bugbounty\\.click|prbly\\.win|qualysperiscope\\.com|vii\\.one|act1on3\\.ru)" }, "operator": 
"match_regex" } @@ -7610,6 +7805,35 @@ ], "transformers": [] }, + { + "id": "ua0-600-63x", + "name": "FeroxBuster", + "tags": { + "type": "attack_tool", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "tool_name": "feroxbuster", + "confidence": "1" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.headers.no_cookies", + "key_path": [ + "user-agent" + ] + } + ], + "regex": "^feroxbuster/" + }, + "operator": "match_regex" + } + ], + "transformers": [] + }, { "id": "ua0-600-6xx", "name": "Stealthy scanner", @@ -7631,7 +7855,7 @@ ] } ], - "regex": "mozilla/4\\.0 \\(compatible(; msie (?:6\\.0; win32|4\\.0; Windows NT))?\\)", + "regex": "mozilla/4\\.0 \\(compatible(; msie (?:6\\.0; (?:win32|Windows NT 5\\.0)|4\\.0; Windows NT))?\\)", "options": { "case_sensitive": false } From e4e72a43e9373de3198766d397e47b10e53d2a2f Mon Sep 17 00:00:00 2001 From: Ida Liu <119438987+ida613@users.noreply.github.com> Date: Wed, 8 Nov 2023 09:11:32 -0500 Subject: [PATCH 054/147] Modified telemetry.enabled to comply with instrumentation telemetry specs (#3765) modified telemetry.enabled to comply with instrumentation telemetry specs --- benchmark/sirun/run-all-variants.js | 2 +- benchmark/sirun/run-one-variant.js | 2 +- packages/dd-trace/src/config.js | 7 ++++--- packages/dd-trace/test/config.spec.js | 16 ++++++++-------- packages/dd-trace/test/plugins/suite.js | 2 +- packages/dd-trace/test/setup/core.js | 2 +- 6 files changed, 16 insertions(+), 15 deletions(-) diff --git a/benchmark/sirun/run-all-variants.js b/benchmark/sirun/run-all-variants.js index a735903583c..60f6a65992d 100755 --- a/benchmark/sirun/run-all-variants.js +++ b/benchmark/sirun/run-all-variants.js @@ -6,7 +6,7 @@ const fs = require('fs') const path = require('path') const { exec, getStdio } = require('./run-util') -process.env.DD_TRACE_TELEMETRY_ENABLED = 'false' +process.env.DD_INSTRUMENTATION_TELEMETRY_ENABLED = 'false' require('./squash-affinity') diff --git a/benchmark/sirun/run-one-variant.js b/benchmark/sirun/run-one-variant.js index 1429250af4e..77bb147c9e7 100755 --- a/benchmark/sirun/run-one-variant.js +++ b/benchmark/sirun/run-one-variant.js @@ -4,7 +4,7 @@ const { exec, getStdio } = require('./run-util') -process.env.DD_TRACE_TELEMETRY_ENABLED = 'false' +process.env.DD_INSTRUMENTATION_TELEMETRY_ENABLED = 'false' const env = Object.assign({}, process.env, { DD_TRACE_STARTUP_LOGS: 'false' }) diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index 65d3df16e16..55932e5f74e 100644 --- a/packages/dd-trace/src/config.js +++ b/packages/dd-trace/src/config.js @@ -235,8 +235,9 @@ class Config { const inServerlessEnvironment = inAWSLambda || isGCPFunction || isAzureFunctionConsumptionPlan - const DD_TRACE_TELEMETRY_ENABLED = coalesce( - process.env.DD_TRACE_TELEMETRY_ENABLED, + const DD_INSTRUMENTATION_TELEMETRY_ENABLED = coalesce( + process.env.DD_TRACE_TELEMETRY_ENABLED, // for backward compatibility + process.env.DD_INSTRUMENTATION_TELEMETRY_ENABLED, // to comply with instrumentation telemetry specs !inServerlessEnvironment ) const DD_TELEMETRY_HEARTBEAT_INTERVAL = process.env.DD_TELEMETRY_HEARTBEAT_INTERVAL @@ -598,7 +599,7 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) 
this.startupLogs = isTrue(DD_TRACE_STARTUP_LOGS) // Disabled for CI Visibility's agentless this.telemetry = { - enabled: DD_TRACE_EXPORTER !== 'datadog' && isTrue(DD_TRACE_TELEMETRY_ENABLED), + enabled: DD_TRACE_EXPORTER !== 'datadog' && isTrue(DD_INSTRUMENTATION_TELEMETRY_ENABLED), heartbeatInterval: DD_TELEMETRY_HEARTBEAT_INTERVAL, debug: isTrue(DD_TELEMETRY_DEBUG), logCollection: isTrue(DD_TELEMETRY_LOG_COLLECTION_ENABLED), diff --git a/packages/dd-trace/test/config.spec.js b/packages/dd-trace/test/config.spec.js index 9694dbc50e8..3016980094b 100644 --- a/packages/dd-trace/test/config.spec.js +++ b/packages/dd-trace/test/config.spec.js @@ -904,7 +904,7 @@ describe('Config', () => { expect(config.tags).to.include({ foo: 'bar', baz: 'qux' }) }) - it('should not set DD_TRACE_TELEMETRY_ENABLED if AWS_LAMBDA_FUNCTION_NAME is present', () => { + it('should not set DD_INSTRUMENTATION_TELEMETRY_ENABLED if AWS_LAMBDA_FUNCTION_NAME is present', () => { process.env.AWS_LAMBDA_FUNCTION_NAME = 'my-great-lambda-function' const config = new Config() @@ -912,7 +912,7 @@ describe('Config', () => { expect(config.telemetry.enabled).to.be.false }) - it('should not set DD_TRACE_TELEMETRY_ENABLED if FUNCTION_NAME and GCP_PROJECT are present', () => { + it('should not set DD_INSTRUMENTATION_TELEMETRY_ENABLED if FUNCTION_NAME and GCP_PROJECT are present', () => { // FUNCTION_NAME and GCP_PROJECT env vars indicate a gcp function with a deprecated runtime process.env.FUNCTION_NAME = 'function_name' process.env.GCP_PROJECT = 'project_name' @@ -922,7 +922,7 @@ describe('Config', () => { expect(config.telemetry.enabled).to.be.false }) - it('should not set DD_TRACE_TELEMETRY_ENABLED if K_SERVICE and FUNCTION_TARGET are present', () => { + it('should not set DD_INSTRUMENTATION_TELEMETRY_ENABLED if K_SERVICE and FUNCTION_TARGET are present', () => { // K_SERVICE and FUNCTION_TARGET env vars indicate a gcp function with a newer runtime process.env.K_SERVICE = 'function_name' process.env.FUNCTION_TARGET = 'function_target' @@ -932,7 +932,7 @@ describe('Config', () => { expect(config.telemetry.enabled).to.be.false }) - it('should not set DD_TRACE_TELEMETRY_ENABLED if Azure Consumption Plan Function', () => { + it('should not set DD_INSTRUMENTATION_TELEMETRY_ENABLED if Azure Consumption Plan Function', () => { // AzureWebJobsScriptRoot and FUNCTIONS_EXTENSION_VERSION env vars indicate an azure function process.env.FUNCTIONS_WORKER_RUNTIME = 'node' process.env.FUNCTIONS_EXTENSION_VERSION = '4' @@ -965,15 +965,15 @@ describe('Config', () => { process.env.DD_TELEMETRY_HEARTBEAT_INTERVAL = origTelemetryHeartbeatIntervalValue }) - it('should not set DD_TRACE_TELEMETRY_ENABLED', () => { - const origTraceTelemetryValue = process.env.DD_TRACE_TELEMETRY_ENABLED - process.env.DD_TRACE_TELEMETRY_ENABLED = 'false' + it('should not set DD_INSTRUMENTATION_TELEMETRY_ENABLED', () => { + const origTraceTelemetryValue = process.env.DD_INSTRUMENTATION_TELEMETRY_ENABLED + process.env.DD_INSTRUMENTATION_TELEMETRY_ENABLED = 'false' const config = new Config() expect(config.telemetry.enabled).to.be.false - process.env.DD_TRACE_TELEMETRY_ENABLED = origTraceTelemetryValue + process.env.DD_INSTRUMENTATION_TELEMETRY_ENABLED = origTraceTelemetryValue }) it('should not set DD_TELEMETRY_METRICS_ENABLED', () => { diff --git a/packages/dd-trace/test/plugins/suite.js b/packages/dd-trace/test/plugins/suite.js index 0d9c27f3411..65d6e6ccb78 100644 --- a/packages/dd-trace/test/plugins/suite.js +++ b/packages/dd-trace/test/plugins/suite.js @@ -12,7 +12,7 @@ 
const url = require('url') const { once } = require('events') const { expect } = require('chai') -process.env.DD_TRACE_TELEMETRY_ENABLED = 'false' +process.env.DD_INSTRUMENTATION_TELEMETRY_ENABLED = 'false' const mkdtemp = util.promisify(fs.mkdtemp) diff --git a/packages/dd-trace/test/setup/core.js b/packages/dd-trace/test/setup/core.js index fa335567bb1..f7d32157a99 100644 --- a/packages/dd-trace/test/setup/core.js +++ b/packages/dd-trace/test/setup/core.js @@ -18,4 +18,4 @@ if (global.describe && typeof global.describe.skip !== 'function') { } } -process.env.DD_TRACE_TELEMETRY_ENABLED = 'false' +process.env.DD_INSTRUMENTATION_TELEMETRY_ENABLED = 'false' From 33f71b9ee000cfeebdeb7c68aa05c81593fb13e6 Mon Sep 17 00:00:00 2001 From: simon-id Date: Wed, 8 Nov 2023 16:58:50 +0100 Subject: [PATCH 055/147] Use exact version for @datadog/native-appsec (#3778) --- package.json | 2 +- yarn.lock | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 21b0db70618..f2248a12761 100644 --- a/package.json +++ b/package.json @@ -68,7 +68,7 @@ "node": ">=16" }, "dependencies": { - "@datadog/native-appsec": "^4.0.0", + "@datadog/native-appsec": "4.0.0", "@datadog/native-iast-rewriter": "2.2.1", "@datadog/native-iast-taint-tracking": "1.6.3", "@datadog/native-metrics": "^2.0.0", diff --git a/yarn.lock b/yarn.lock index cbcd5eaa37b..0be504d0107 100644 --- a/yarn.lock +++ b/yarn.lock @@ -385,7 +385,7 @@ resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.5.0.tgz#bb504579c1cae923e6576a4f5da43d25f97bdbd9" integrity sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ== -"@datadog/native-appsec@^4.0.0": +"@datadog/native-appsec@4.0.0": version "4.0.0" resolved "https://registry.yarnpkg.com/@datadog/native-appsec/-/native-appsec-4.0.0.tgz#ee08138b987dec557eac3650a43a972dac85b6a6" integrity sha512-myTguXJ3VQHS2E1ylNsSF1avNpDmq5t+K4Q47wdzeakGc3sDIDDyEbvuFTujl9c9wBIkup94O1mZj5DR37ajzA== From 079bed4d5f875213c7968b615644782a7137570a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Wed, 8 Nov 2023 17:08:10 +0100 Subject: [PATCH 056/147] [ci-visibility] Add flags to force code coverage reporting and test skipping (#3767) --- .../get-itr-configuration.js | 24 +++++++++++++------ 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js index 40c7f1ad8c6..2c5aade51e7 100644 --- a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js +++ b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js @@ -67,15 +67,25 @@ function getItrConfiguration ({ try { const { data: { - attributes: { - code_coverage: isCodeCoverageEnabled, - tests_skipping: isSuitesSkippingEnabled - } + attributes } } = JSON.parse(res) - const config = { isCodeCoverageEnabled, isSuitesSkippingEnabled } - log.debug(() => `Received settings: ${config}`) - done(null, config) + + let isCodeCoverageEnabled = attributes.code_coverage + let isSuitesSkippingEnabled = attributes.tests_skipping + + log.debug(() => `Remote settings: ${{ isCodeCoverageEnabled, isSuitesSkippingEnabled }}`) + + if (process.env.DD_CIVISIBILITY_DANGEROUSLY_FORCE_COVERAGE) { + isCodeCoverageEnabled = true + log.debug(() => 'Dangerously set code coverage to true') + } + if 
(process.env.DD_CIVISIBILITY_DANGEROUSLY_FORCE_TEST_SKIPPING) { +      isSuitesSkippingEnabled = true +      log.debug(() => 'Dangerously set test skipping to true') +    } + +    done(null, { isCodeCoverageEnabled, isSuitesSkippingEnabled })     } catch (err) {       done(err)     } From 5402ad50161bef80b795a7227f81779a9d195b85 Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Wed, 8 Nov 2023 18:25:22 +0100 Subject: [PATCH 057/147] PROF-8545: Restore eager release of tags, adapt endpoint profiling code (#3759) * Restores eager release of tags for exported spans in `span_processor.js`. * Moves tag fetching in `wall.js` from `_updateContext` to `_enter` to compensate for the above. * Fixes some tests that relied on tags being lazily released. --- .../dd-trace/src/profiling/profilers/wall.js | 45 ++++++++++++------- packages/dd-trace/src/span_processor.js | 4 ++ packages/dd-trace/test/tracer.spec.js | 41 +++++++---------- 3 files changed, 48 insertions(+), 42 deletions(-) diff --git a/packages/dd-trace/src/profiling/profilers/wall.js b/packages/dd-trace/src/profiling/profilers/wall.js index 5c6f01555e7..1e60572fb22 100644 --- a/packages/dd-trace/src/profiling/profilers/wall.js +++ b/packages/dd-trace/src/profiling/profilers/wall.js @@ -120,6 +120,7 @@ class NativeWallProfiler { this._pprof.time.setContext(this._currentContext) this._lastSpan = undefined this._lastStartedSpans = undefined + this._lastWebTags = undefined this._lastSampleCount = 0 beforeCh.subscribe(this._enter) @@ -145,10 +146,29 @@ class NativeWallProfiler { const span = getActiveSpan() if (span) { this._lastSpan = span -      this._lastStartedSpans = getStartedSpans(span.context()) +      const startedSpans = getStartedSpans(span.context()) +      this._lastStartedSpans = startedSpans +      if (this._endpointCollectionEnabled) { +        let found = false +        // Find the first webspan starting from the end: +        // There might be several webspans, for example with next.js, http plugin creates a first span +        // and then next.js plugin creates a child span, and this child span haves the correct endpoint information. 
- for (let i = startedSpans.length - 1; i >= 0; i--) { - const tags = getSpanContextTags(startedSpans[i]) - if (isWebServerSpan(tags)) { - context.webTags = tags - // endpoint may not be determined yet, but keep it as fallback - // if tags are not available anymore during serialization - context.endpoint = endpointNameFromTags(tags) - break - } - } + if (this._lastWebTags) { + context.webTags = this._lastWebTags + // endpoint may not be determined yet, but keep it as fallback + // if tags are not available anymore during serialization + context.endpoint = endpointNameFromTags(this._lastWebTags) } } @@ -224,6 +234,9 @@ class NativeWallProfiler { beforeCh.unsubscribe(this._enter) enterCh.unsubscribe(this._enter) this._profilerState = undefined + this._lastSpan = undefined + this._lastStartedSpans = undefined + this._lastWebTags = undefined } this._started = false diff --git a/packages/dd-trace/src/span_processor.js b/packages/dd-trace/src/span_processor.js index c6e8c300529..aea348b11fb 100644 --- a/packages/dd-trace/src/span_processor.js +++ b/packages/dd-trace/src/span_processor.js @@ -138,6 +138,10 @@ class SpanProcessor { } } + for (const span of trace.finished) { + span.context()._tags = {} + } + trace.started = active trace.finished = [] } diff --git a/packages/dd-trace/test/tracer.spec.js b/packages/dd-trace/test/tracer.spec.js index 79c81aafec8..8591ebe3b8f 100644 --- a/packages/dd-trace/test/tracer.spec.js +++ b/packages/dd-trace/test/tracer.spec.js @@ -13,6 +13,7 @@ const { DD_MAJOR } = require('../../../version') const SPAN_TYPE = tags.SPAN_TYPE const RESOURCE_NAME = tags.RESOURCE_NAME const SERVICE_NAME = tags.SERVICE_NAME +const EXPORT_SERVICE_NAME = 'service' const BASE_SERVICE = tags.BASE_SERVICE const describeOrphanable = DD_MAJOR < 4 ? 
describe : describe.skip @@ -39,6 +40,7 @@ describe('Tracer', () => { tracer = new Tracer(config) tracer._exporter.setUrl = sinon.stub() + tracer._exporter.export = sinon.stub() tracer._prioritySampler.configure = sinon.stub() }) @@ -89,38 +91,25 @@ describe('Tracer', () => { }) describe('_dd.base_service', () => { - let genSpan - it('should be set when tracer.trace service mismatches configured service', () => { - tracer.trace('name', { service: 'custom' }, span => { - genSpan = span - }) - const tags = genSpan.context()._tags - expect(genSpan).to.be.instanceof(Span) - expect(tags).to.include({ - [BASE_SERVICE]: 'service', - [SERVICE_NAME]: 'custom' - }) + tracer.trace('name', { service: 'custom' }, () => {}) + const trace = tracer._exporter.export.getCall(0).args[0][0] + expect(trace).to.have.property(EXPORT_SERVICE_NAME, 'custom') + expect(trace.meta).to.have.property(BASE_SERVICE, 'service') }) it('should not be set when tracer.trace service is not supplied', () => { - tracer.trace('name', {}, span => { - genSpan = span - }) - const tags = genSpan.context()._tags - expect(genSpan).to.be.instanceof(Span) - expect(tags).to.have.property(SERVICE_NAME, 'service') - expect(tags).to.not.have.property(BASE_SERVICE) + tracer.trace('name', {}, () => {}) + const trace = tracer._exporter.export.getCall(0).args[0][0] + expect(trace).to.have.property(EXPORT_SERVICE_NAME, 'service') + expect(trace.meta).to.not.have.property(BASE_SERVICE) }) - it('should be set when tracer.trace service matched configured service', () => { - tracer.trace('name', { service: 'service' }, span => { - genSpan = span - }) - const tags = genSpan.context()._tags - expect(genSpan).to.be.instanceof(Span) - expect(tags).to.have.property(SERVICE_NAME, 'service') - expect(tags).to.not.have.property(BASE_SERVICE) + it('should not be set when tracer.trace service matched configured service', () => { + tracer.trace('name', { service: 'service' }, () => {}) + const trace = tracer._exporter.export.getCall(0).args[0][0] + expect(trace).to.have.property(EXPORT_SERVICE_NAME, 'service') + expect(trace.meta).to.not.have.property(BASE_SERVICE) }) }) From de3c1b6ed078a45b061e21b3669304f4fa446a51 Mon Sep 17 00:00:00 2001 From: Ugaitz Urien Date: Thu, 9 Nov 2023 09:42:08 +0100 Subject: [PATCH 058/147] Run nextjs system-test in tracer CI (#3777) --- .github/workflows/system-tests.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/system-tests.yml b/.github/workflows/system-tests.yml index 7e56e57dd26..b37230370c2 100644 --- a/.github/workflows/system-tests.yml +++ b/.github/workflows/system-tests.yml @@ -18,6 +18,7 @@ jobs: include: - weblog-variant: express4 - weblog-variant: express4-typescript + - weblog-variant: nextjs env: TEST_LIBRARY: nodejs WEBLOG_VARIANT: ${{ matrix.weblog-variant }} From e8ce3912181f69a5db0b721fd73e8fa4fa054085 Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Fri, 10 Nov 2023 17:22:30 +0100 Subject: [PATCH 059/147] Cache web span lookup, so we only perform it once per span (#3779) --- .../dd-trace/src/profiling/profilers/wall.js | 35 ++++++++++++------- 1 file changed, 22 insertions(+), 13 deletions(-) diff --git a/packages/dd-trace/src/profiling/profilers/wall.js b/packages/dd-trace/src/profiling/profilers/wall.js index 1e60572fb22..82d92d0e24c 100644 --- a/packages/dd-trace/src/profiling/profilers/wall.js +++ b/packages/dd-trace/src/profiling/profilers/wall.js @@ -18,6 +18,8 @@ const threadName = (function () { return `${name} Event Loop` })() +const CachedWebTags = 
Symbol('NativeWallProfiler.CachedWebTags') + let kSampleCount function getActiveSpan () { @@ -149,20 +151,27 @@ class NativeWallProfiler { const startedSpans = getStartedSpans(span.context()) this._lastStartedSpans = startedSpans if (this._endpointCollectionEnabled) { - let found = false - // Find the first webspan starting from the end: - // There might be several webspans, for example with next.js, http plugin creates a first span - // and then next.js plugin creates a child span, and this child span haves the correct endpoint information. - for (let i = startedSpans.length - 1; i >= 0; i--) { - const tags = getSpanContextTags(startedSpans[i]) - if (isWebServerSpan(tags)) { - this._lastWebTags = tags - found = true - break + const cachedWebTags = span[CachedWebTags] + if (cachedWebTags === undefined) { + let found = false + // Find the first webspan starting from the end: + // There might be several webspans, for example with next.js, http plugin creates a first span + // and then next.js plugin creates a child span, and this child span haves the correct endpoint information. + for (let i = startedSpans.length - 1; i >= 0; i--) { + const tags = getSpanContextTags(startedSpans[i]) + if (isWebServerSpan(tags)) { + this._lastWebTags = tags + span[CachedWebTags] = tags + found = true + break + } } - } - if (!found) { - this._lastWebTags = undefined + if (!found) { + this._lastWebTags = undefined + span[CachedWebTags] = null // cache negative lookup result + } + } else { + this._lastWebTags = cachedWebTags } } } else { From 690042a64813602ed2981abffa7994c780f2bc86 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Mon, 13 Nov 2023 11:21:05 +0100 Subject: [PATCH 060/147] [ci-visibility] Update git metadata extraction (#3771) --- packages/dd-trace/src/plugins/util/ci.js | 25 +- packages/dd-trace/src/plugins/util/git.js | 3 +- packages/dd-trace/src/plugins/util/url.js | 26 ++ .../src/plugins/util/user-provided-git.js | 15 +- .../plugins/util/ci-env/azurepipelines.json | 196 ++++++++++++++ .../test/plugins/util/ci-env/bitbucket.json | 133 ++++++++++ .../test/plugins/util/ci-env/bitrise.json | 147 +++++++++++ .../test/plugins/util/ci-env/buddy.json | 240 ++++++++++++++++++ .../test/plugins/util/ci-env/buildkite.json | 217 ++++++++++++++++ .../test/plugins/util/ci-env/circleci.json | 161 ++++++++++++ .../test/plugins/util/ci-env/codefresh.json | 162 ++++++++++++ .../test/plugins/util/ci-env/github.json | 154 ++++++++++- .../test/plugins/util/ci-env/gitlab.json | 180 ++++++++++--- .../test/plugins/util/ci-env/jenkins.json | 147 +++++++++++ .../test/plugins/util/ci-env/teamcity.json | 112 ++++++++ .../plugins/util/ci-env/usersupplied.json | 168 ++++++++++++ .../dd-trace/test/plugins/util/git.spec.js | 39 ++- .../dd-trace/test/plugins/util/url.spec.js | 40 +++ 18 files changed, 2095 insertions(+), 70 deletions(-) create mode 100644 packages/dd-trace/src/plugins/util/url.js create mode 100644 packages/dd-trace/test/plugins/util/url.spec.js diff --git a/packages/dd-trace/src/plugins/util/ci.js b/packages/dd-trace/src/plugins/util/ci.js index 8ced8a1d054..35e58c5a94e 100644 --- a/packages/dd-trace/src/plugins/util/ci.js +++ b/packages/dd-trace/src/plugins/util/ci.js @@ -1,5 +1,3 @@ -const URL = require('url').URL - const { GIT_BRANCH, GIT_COMMIT_SHA, @@ -24,6 +22,7 @@ const { CI_NODE_LABELS, CI_NODE_NAME } = require('./tags') +const { filterSensitiveInfoFromRepository } = require('./url') // Receives a string with the form 'John Doe ' // and returns { name: 'John 
Doe', email: 'john.doe@gmail.com' } @@ -67,20 +66,6 @@ function normalizeRef (ref) { return ref.replace(/origin\/|refs\/heads\/|tags\//gm, '') } -function filterSensitiveInfoFromRepository (repositoryUrl) { - if (repositoryUrl.startsWith('git@')) { - return repositoryUrl - } - - try { - const { protocol, hostname, pathname } = new URL(repositoryUrl) - - return `${protocol}//${hostname}${pathname}` - } catch (e) { - return '' - } -} - function resolveTilde (filePath) { if (!filePath || typeof filePath !== 'string') { return '' @@ -271,20 +256,22 @@ module.exports = { const ref = GITHUB_HEAD_REF || GITHUB_REF || '' const refKey = ref.includes('tags/') ? GIT_TAG : GIT_BRANCH + // Both pipeline URL and job URL include GITHUB_SERVER_URL, which can include user credentials, + // so we pass them through `filterSensitiveInfoFromRepository`. tags = { [CI_PIPELINE_ID]: GITHUB_RUN_ID, [CI_PIPELINE_NAME]: GITHUB_WORKFLOW, [CI_PIPELINE_NUMBER]: GITHUB_RUN_NUMBER, - [CI_PIPELINE_URL]: pipelineURL, + [CI_PIPELINE_URL]: filterSensitiveInfoFromRepository(pipelineURL), [CI_PROVIDER_NAME]: 'github', [GIT_COMMIT_SHA]: GITHUB_SHA, [GIT_REPOSITORY_URL]: repositoryURL, - [CI_JOB_URL]: jobUrl, + [CI_JOB_URL]: filterSensitiveInfoFromRepository(jobUrl), [CI_JOB_NAME]: GITHUB_JOB, [CI_WORKSPACE_PATH]: GITHUB_WORKSPACE, [refKey]: ref, [CI_ENV_VARS]: JSON.stringify({ - GITHUB_SERVER_URL, + GITHUB_SERVER_URL: filterSensitiveInfoFromRepository(GITHUB_SERVER_URL), GITHUB_REPOSITORY, GITHUB_RUN_ID, GITHUB_RUN_ATTEMPT diff --git a/packages/dd-trace/src/plugins/util/git.js b/packages/dd-trace/src/plugins/util/git.js index 6746ebd9343..3a640ff249b 100644 --- a/packages/dd-trace/src/plugins/util/git.js +++ b/packages/dd-trace/src/plugins/util/git.js @@ -19,6 +19,7 @@ const { GIT_COMMIT_AUTHOR_NAME, CI_WORKSPACE_PATH } = require('./tags') +const { filterSensitiveInfoFromRepository } = require('./url') const GIT_REV_LIST_MAX_BUFFER = 8 * 1024 * 1024 // 8MB @@ -214,7 +215,7 @@ function getGitMetadata (ciMetadata) { return { [GIT_REPOSITORY_URL]: - repositoryUrl || sanitizedExec('git', ['ls-remote', '--get-url']), + filterSensitiveInfoFromRepository(repositoryUrl || sanitizedExec('git', ['ls-remote', '--get-url'])), [GIT_COMMIT_MESSAGE]: commitMessage || sanitizedExec('git', ['show', '-s', '--format=%s']), [GIT_COMMIT_AUTHOR_DATE]: authorDate, diff --git a/packages/dd-trace/src/plugins/util/url.js b/packages/dd-trace/src/plugins/util/url.js new file mode 100644 index 00000000000..e7fb382b5df --- /dev/null +++ b/packages/dd-trace/src/plugins/util/url.js @@ -0,0 +1,26 @@ +const { URL } = require('url') + +function filterSensitiveInfoFromRepository (repositoryUrl) { + if (!repositoryUrl) { + return '' + } + if (repositoryUrl.startsWith('git@')) { + return repositoryUrl + } + + // Remove the username from ssh URLs + if (repositoryUrl.startsWith('ssh://')) { + const sshRegex = /^(ssh:\/\/)[^@/]*@/ + return repositoryUrl.replace(sshRegex, '$1') + } + + try { + const { protocol, host, pathname } = new URL(repositoryUrl) + + return `${protocol}//${host}${pathname === '/' ? 
'' : pathname}` + } catch (e) { + return '' + } +} + +module.exports = { filterSensitiveInfoFromRepository } diff --git a/packages/dd-trace/src/plugins/util/user-provided-git.js b/packages/dd-trace/src/plugins/util/user-provided-git.js index 7aab955e3a5..4a18a1c58be 100644 --- a/packages/dd-trace/src/plugins/util/user-provided-git.js +++ b/packages/dd-trace/src/plugins/util/user-provided-git.js @@ -13,7 +13,7 @@ const { } = require('./tags') const { normalizeRef } = require('./ci') -const { URL } = require('url') +const { filterSensitiveInfoFromRepository } = require('./url') function removeEmptyValues (tags) { return Object.keys(tags).reduce((filteredTags, tag) => { @@ -27,19 +27,6 @@ function removeEmptyValues (tags) { }, {}) } -function filterSensitiveInfoFromRepository (repositoryUrl) { - try { - if (repositoryUrl.startsWith('git@')) { - return repositoryUrl - } - const { protocol, hostname, pathname } = new URL(repositoryUrl) - - return `${protocol}//${hostname}${pathname}` - } catch (e) { - return repositoryUrl - } -} - // The regex is extracted from // https://github.com/jonschlinkert/is-git-url/blob/396965ffabf2f46656c8af4c47bef1d69f09292e/index.js#L9C15-L9C87 function validateGitRepositoryUrl (repoUrl) { diff --git a/packages/dd-trace/test/plugins/util/ci-env/azurepipelines.json b/packages/dd-trace/test/plugins/util/ci-env/azurepipelines.json index 9262c329866..594da6d147b 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/azurepipelines.json +++ b/packages/dd-trace/test/plugins/util/ci-env/azurepipelines.json @@ -677,5 +677,201 @@ "git.commit.message": "azure-pipelines-commit-message", "git.repository_url": "https://dev.azure.com/fabrikamfiber/repo.git" } + ], + [ + { + "BUILD_BUILDID": "azure-pipelines-build-id", + "BUILD_DEFINITIONNAME": "azure-pipelines-name", + "BUILD_REPOSITORY_URI": "https://user:password@dev.azure.com:1234/fabrikamfiber/repo.git", + "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", + "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", + "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", + "SYSTEM_JOBID": "azure-pipelines-job-id", + "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", + "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", + "TF_BUILD": "True" + }, + { + "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.pipeline.id": "azure-pipelines-build-id", + "ci.pipeline.name": "azure-pipelines-name", + "ci.pipeline.number": "azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.provider.name": "azurepipelines", + "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", + "git.commit.author.name": "azure-pipelines-commit-author", + "git.commit.message": "azure-pipelines-commit-message", + "git.repository_url": "https://dev.azure.com:1234/fabrikamfiber/repo.git" + } + ], + [ + { + "BUILD_BUILDID": "azure-pipelines-build-id", + "BUILD_DEFINITIONNAME": "azure-pipelines-name", + "BUILD_REPOSITORY_URI": "https://user:password@1.1.1.1:1234/fabrikamfiber/repo.git", + 
"BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", + "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", + "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", + "SYSTEM_JOBID": "azure-pipelines-job-id", + "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", + "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", + "TF_BUILD": "True" + }, + { + "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.pipeline.id": "azure-pipelines-build-id", + "ci.pipeline.name": "azure-pipelines-name", + "ci.pipeline.number": "azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.provider.name": "azurepipelines", + "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", + "git.commit.author.name": "azure-pipelines-commit-author", + "git.commit.message": "azure-pipelines-commit-message", + "git.repository_url": "https://1.1.1.1:1234/fabrikamfiber/repo.git" + } + ], + [ + { + "BUILD_BUILDID": "azure-pipelines-build-id", + "BUILD_DEFINITIONNAME": "azure-pipelines-name", + "BUILD_REPOSITORY_URI": "https://user:password@1.1.1.1:1234/fabrikamfiber/repo_with_@_yeah.git", + "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", + "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", + "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", + "SYSTEM_JOBID": "azure-pipelines-job-id", + "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", + "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", + "TF_BUILD": "True" + }, + { + "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.pipeline.id": "azure-pipelines-build-id", + "ci.pipeline.name": "azure-pipelines-name", + "ci.pipeline.number": "azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.provider.name": "azurepipelines", + "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", + "git.commit.author.name": "azure-pipelines-commit-author", + "git.commit.message": "azure-pipelines-commit-message", + "git.repository_url": "https://1.1.1.1:1234/fabrikamfiber/repo_with_@_yeah.git" + } + ], + [ + { + "BUILD_BUILDID": "azure-pipelines-build-id", + "BUILD_DEFINITIONNAME": "azure-pipelines-name", + "BUILD_REPOSITORY_URI": "https://user@dev.azure.com/fabrikamfiber/repo.git", + "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", + "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", + "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", + "SYSTEM_JOBID": "azure-pipelines-job-id", + 
"SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", + "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", + "TF_BUILD": "True" + }, + { + "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.pipeline.id": "azure-pipelines-build-id", + "ci.pipeline.name": "azure-pipelines-name", + "ci.pipeline.number": "azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.provider.name": "azurepipelines", + "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", + "git.commit.author.name": "azure-pipelines-commit-author", + "git.commit.message": "azure-pipelines-commit-message", + "git.repository_url": "https://dev.azure.com/fabrikamfiber/repo.git" + } + ], + [ + { + "BUILD_BUILDID": "azure-pipelines-build-id", + "BUILD_DEFINITIONNAME": "azure-pipelines-name", + "BUILD_REPOSITORY_URI": "ssh://user@host.xz:port/path/to/repo.git/", + "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", + "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", + "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", + "SYSTEM_JOBID": "azure-pipelines-job-id", + "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", + "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", + "TF_BUILD": "True" + }, + { + "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.pipeline.id": "azure-pipelines-build-id", + "ci.pipeline.name": "azure-pipelines-name", + "ci.pipeline.number": "azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.provider.name": "azurepipelines", + "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", + "git.commit.author.name": "azure-pipelines-commit-author", + "git.commit.message": "azure-pipelines-commit-message", + "git.repository_url": "ssh://host.xz:port/path/to/repo.git/" + } + ], + [ + { + "BUILD_BUILDID": "azure-pipelines-build-id", + "BUILD_DEFINITIONNAME": "azure-pipelines-name", + "BUILD_REPOSITORY_URI": "ssh://user:password@host.xz:port/path/to/repo.git/", + "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", + "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", + "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", + "SYSTEM_JOBID": "azure-pipelines-job-id", + "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", + "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", + "TF_BUILD": "True" + }, + { + "_dd.ci.env_vars": 
"{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.pipeline.id": "azure-pipelines-build-id", + "ci.pipeline.name": "azure-pipelines-name", + "ci.pipeline.number": "azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.provider.name": "azurepipelines", + "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", + "git.commit.author.name": "azure-pipelines-commit-author", + "git.commit.message": "azure-pipelines-commit-message", + "git.repository_url": "ssh://host.xz:port/path/to/repo.git/" + } + ], + [ + { + "BUILD_BUILDID": "azure-pipelines-build-id", + "BUILD_DEFINITIONNAME": "azure-pipelines-name", + "BUILD_REPOSITORY_URI": "ssh://user:password@1.1.1.1:port/path/to/repo.git/", + "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", + "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", + "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", + "SYSTEM_JOBID": "azure-pipelines-job-id", + "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", + "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", + "TF_BUILD": "True" + }, + { + "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.pipeline.id": "azure-pipelines-build-id", + "ci.pipeline.name": "azure-pipelines-name", + "ci.pipeline.number": "azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.provider.name": "azurepipelines", + "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", + "git.commit.author.name": "azure-pipelines-commit-author", + "git.commit.message": "azure-pipelines-commit-message", + "git.repository_url": "ssh://1.1.1.1:port/path/to/repo.git/" + } ] ] diff --git a/packages/dd-trace/test/plugins/util/ci-env/bitbucket.json b/packages/dd-trace/test/plugins/util/ci-env/bitbucket.json index 4b3c7e52c93..72d47cdff00 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/bitbucket.json +++ b/packages/dd-trace/test/plugins/util/ci-env/bitbucket.json @@ -418,5 +418,138 @@ "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://bitbucket.org/DataDog/dogweb.git" } + ], + [ + { + "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "https://user@bitbucket.org/DataDog/dogweb.git", + "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", + "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo" + }, + { + "ci.job.url": "https://bitbucket.org/bitbucket-repo/addon/pipelines/home#!/results/bitbucket-build-num", + "ci.pipeline.id": "bitbucket-uuid", + "ci.pipeline.name": "bitbucket-repo", + "ci.pipeline.number": 
"bitbucket-build-num", + "ci.pipeline.url": "https://bitbucket.org/bitbucket-repo/addon/pipelines/home#!/results/bitbucket-build-num", + "ci.provider.name": "bitbucket", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitbucket.org/DataDog/dogweb.git" + } + ], + [ + { + "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "https://user:password@bitbucket.org:1234/DataDog/dogweb.git", + "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", + "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo" + }, + { + "ci.job.url": "https://bitbucket.org/bitbucket-repo/addon/pipelines/home#!/results/bitbucket-build-num", + "ci.pipeline.id": "bitbucket-uuid", + "ci.pipeline.name": "bitbucket-repo", + "ci.pipeline.number": "bitbucket-build-num", + "ci.pipeline.url": "https://bitbucket.org/bitbucket-repo/addon/pipelines/home#!/results/bitbucket-build-num", + "ci.provider.name": "bitbucket", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitbucket.org:1234/DataDog/dogweb.git" + } + ], + [ + { + "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "https://user:password@1.1.1.1/DataDog/dogweb.git", + "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", + "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo" + }, + { + "ci.job.url": "https://bitbucket.org/bitbucket-repo/addon/pipelines/home#!/results/bitbucket-build-num", + "ci.pipeline.id": "bitbucket-uuid", + "ci.pipeline.name": "bitbucket-repo", + "ci.pipeline.number": "bitbucket-build-num", + "ci.pipeline.url": "https://bitbucket.org/bitbucket-repo/addon/pipelines/home#!/results/bitbucket-build-num", + "ci.provider.name": "bitbucket", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://1.1.1.1/DataDog/dogweb.git" + } + ], + [ + { + "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "https://user:password@1.1.1.1:1234/DataDog/dogweb.git", + "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", + "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo" + }, + { + "ci.job.url": "https://bitbucket.org/bitbucket-repo/addon/pipelines/home#!/results/bitbucket-build-num", + "ci.pipeline.id": "bitbucket-uuid", + "ci.pipeline.name": "bitbucket-repo", + "ci.pipeline.number": "bitbucket-build-num", + "ci.pipeline.url": "https://bitbucket.org/bitbucket-repo/addon/pipelines/home#!/results/bitbucket-build-num", + "ci.provider.name": "bitbucket", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://1.1.1.1:1234/DataDog/dogweb.git" + } + ], + [ + { + "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "https://user:password@1.1.1.1:1234/DataDog/dogweb_with_@_yeah.git", + "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", + "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo" + }, + { + "ci.job.url": "https://bitbucket.org/bitbucket-repo/addon/pipelines/home#!/results/bitbucket-build-num", + "ci.pipeline.id": "bitbucket-uuid", + "ci.pipeline.name": "bitbucket-repo", + "ci.pipeline.number": "bitbucket-build-num", + "ci.pipeline.url": "https://bitbucket.org/bitbucket-repo/addon/pipelines/home#!/results/bitbucket-build-num", + "ci.provider.name": "bitbucket", + "git.commit.sha": 
"b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://1.1.1.1:1234/DataDog/dogweb_with_@_yeah.git" + } + ], + [ + { + "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "ssh://user@host.xz:port/path/to/repo.git/", + "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", + "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo" + }, + { + "ci.job.url": "https://bitbucket.org/bitbucket-repo/addon/pipelines/home#!/results/bitbucket-build-num", + "ci.pipeline.id": "bitbucket-uuid", + "ci.pipeline.name": "bitbucket-repo", + "ci.pipeline.number": "bitbucket-build-num", + "ci.pipeline.url": "https://bitbucket.org/bitbucket-repo/addon/pipelines/home#!/results/bitbucket-build-num", + "ci.provider.name": "bitbucket", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "ssh://host.xz:port/path/to/repo.git/" + } + ], + [ + { + "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "ssh://user:password@host.xz:port/path/to/repo.git/", + "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", + "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo" + }, + { + "ci.job.url": "https://bitbucket.org/bitbucket-repo/addon/pipelines/home#!/results/bitbucket-build-num", + "ci.pipeline.id": "bitbucket-uuid", + "ci.pipeline.name": "bitbucket-repo", + "ci.pipeline.number": "bitbucket-build-num", + "ci.pipeline.url": "https://bitbucket.org/bitbucket-repo/addon/pipelines/home#!/results/bitbucket-build-num", + "ci.provider.name": "bitbucket", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "ssh://host.xz:port/path/to/repo.git/" + } ] ] diff --git a/packages/dd-trace/test/plugins/util/ci-env/bitrise.json b/packages/dd-trace/test/plugins/util/ci-env/bitrise.json index 5563094dc01..6f5b52cdf90 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/bitrise.json +++ b/packages/dd-trace/test/plugins/util/ci-env/bitrise.json @@ -499,5 +499,152 @@ "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/DataDog/dogweb.git" } + ], + [ + { + "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", + "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", + "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", + "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_REPOSITORY_URL": "https://user@github.com/DataDog/dogweb.git" + }, + { + "ci.pipeline.id": "bitrise-pipeline-id", + "ci.pipeline.name": "bitrise-pipeline-name", + "ci.pipeline.number": "bitrise-pipeline-number", + "ci.pipeline.url": "https://bitrise-build-url.com//", + "ci.provider.name": "bitrise", + "git.commit.message": "bitrise-git-commit-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com/DataDog/dogweb.git" + } + ], + [ + { + "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", + "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", + "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", + "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_REPOSITORY_URL": "https://user:password@github.com:1234/DataDog/dogweb.git" + }, + { + "ci.pipeline.id": 
"bitrise-pipeline-id", + "ci.pipeline.name": "bitrise-pipeline-name", + "ci.pipeline.number": "bitrise-pipeline-number", + "ci.pipeline.url": "https://bitrise-build-url.com//", + "ci.provider.name": "bitrise", + "git.commit.message": "bitrise-git-commit-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com:1234/DataDog/dogweb.git" + } + ], + [ + { + "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", + "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", + "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", + "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_REPOSITORY_URL": "https://user:password@1.1.1.1/DataDog/dogweb.git" + }, + { + "ci.pipeline.id": "bitrise-pipeline-id", + "ci.pipeline.name": "bitrise-pipeline-name", + "ci.pipeline.number": "bitrise-pipeline-number", + "ci.pipeline.url": "https://bitrise-build-url.com//", + "ci.provider.name": "bitrise", + "git.commit.message": "bitrise-git-commit-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://1.1.1.1/DataDog/dogweb.git" + } + ], + [ + { + "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", + "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", + "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", + "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_REPOSITORY_URL": "https://user:password@1.1.1.1:1234/DataDog/dogweb.git" + }, + { + "ci.pipeline.id": "bitrise-pipeline-id", + "ci.pipeline.name": "bitrise-pipeline-name", + "ci.pipeline.number": "bitrise-pipeline-number", + "ci.pipeline.url": "https://bitrise-build-url.com//", + "ci.provider.name": "bitrise", + "git.commit.message": "bitrise-git-commit-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://1.1.1.1:1234/DataDog/dogweb.git" + } + ], + [ + { + "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", + "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", + "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", + "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_REPOSITORY_URL": "https://user:password@1.1.1.1:1234/DataDog/dogweb_with_@_yeah.git" + }, + { + "ci.pipeline.id": "bitrise-pipeline-id", + "ci.pipeline.name": "bitrise-pipeline-name", + "ci.pipeline.number": "bitrise-pipeline-number", + "ci.pipeline.url": "https://bitrise-build-url.com//", + "ci.provider.name": "bitrise", + "git.commit.message": "bitrise-git-commit-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://1.1.1.1:1234/DataDog/dogweb_with_@_yeah.git" + } + ], + [ + { + "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", + "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", + "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", + "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_REPOSITORY_URL": "ssh://user@host.xz:port/path/to/repo.git/" + }, + { + "ci.pipeline.id": "bitrise-pipeline-id", + "ci.pipeline.name": "bitrise-pipeline-name", + "ci.pipeline.number": 
"bitrise-pipeline-number", + "ci.pipeline.url": "https://bitrise-build-url.com//", + "ci.provider.name": "bitrise", + "git.commit.message": "bitrise-git-commit-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "ssh://host.xz:port/path/to/repo.git/" + } + ], + [ + { + "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", + "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", + "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", + "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_REPOSITORY_URL": "ssh://user:password@host.xz:port/path/to/repo.git/" + }, + { + "ci.pipeline.id": "bitrise-pipeline-id", + "ci.pipeline.name": "bitrise-pipeline-name", + "ci.pipeline.number": "bitrise-pipeline-number", + "ci.pipeline.url": "https://bitrise-build-url.com//", + "ci.provider.name": "bitrise", + "git.commit.message": "bitrise-git-commit-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "ssh://host.xz:port/path/to/repo.git/" + } ] ] diff --git a/packages/dd-trace/test/plugins/util/ci-env/buddy.json b/packages/dd-trace/test/plugins/util/ci-env/buddy.json index 26bf616455b..007cc196652 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/buddy.json +++ b/packages/dd-trace/test/plugins/util/ci-env/buddy.json @@ -177,5 +177,245 @@ "git.repository_url": "git@github.com:DataDog/userrepo.git", "git.tag": "v1.0" } + ], + [ + { + "BUDDY": "true", + "BUDDY_EXECUTION_BRANCH": "master", + "BUDDY_EXECUTION_ID": "buddy-execution-id", + "BUDDY_EXECUTION_REVISION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BUDDY_EXECUTION_REVISION_COMMITTER_EMAIL": "mikebenson@buddy.works", + "BUDDY_EXECUTION_REVISION_COMMITTER_NAME": "Mike Benson", + "BUDDY_EXECUTION_REVISION_MESSAGE": "Create buddy.yml", + "BUDDY_EXECUTION_TAG": "v1.0", + "BUDDY_EXECUTION_URL": "https://app.buddy.works/myworkspace/my-project/pipelines/pipeline/456/execution/5d9dc42c422f5a268b389d08", + "BUDDY_PIPELINE_ID": "456", + "BUDDY_PIPELINE_NAME": "Deploy to Production", + "BUDDY_SCM_URL": "https://user:password@github.com/buddyworks/my-project.git" + }, + { + "ci.pipeline.id": "456/buddy-execution-id", + "ci.pipeline.name": "Deploy to Production", + "ci.pipeline.number": "buddy-execution-id", + "ci.pipeline.url": "https://app.buddy.works/myworkspace/my-project/pipelines/pipeline/456/execution/5d9dc42c422f5a268b389d08", + "ci.provider.name": "buddy", + "git.branch": "master", + "git.commit.committer.email": "mikebenson@buddy.works", + "git.commit.committer.name": "Mike Benson", + "git.commit.message": "Create buddy.yml", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com/buddyworks/my-project.git", + "git.tag": "v1.0" + } + ], + [ + { + "BUDDY": "true", + "BUDDY_EXECUTION_BRANCH": "master", + "BUDDY_EXECUTION_ID": "buddy-execution-id", + "BUDDY_EXECUTION_REVISION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BUDDY_EXECUTION_REVISION_COMMITTER_EMAIL": "mikebenson@buddy.works", + "BUDDY_EXECUTION_REVISION_COMMITTER_NAME": "Mike Benson", + "BUDDY_EXECUTION_REVISION_MESSAGE": "Create buddy.yml", + "BUDDY_EXECUTION_TAG": "v1.0", + "BUDDY_EXECUTION_URL": "https://app.buddy.works/myworkspace/my-project/pipelines/pipeline/456/execution/5d9dc42c422f5a268b389d08", + "BUDDY_PIPELINE_ID": "456", + "BUDDY_PIPELINE_NAME": "Deploy to Production", + "BUDDY_SCM_URL": 
"https://user@github.com/buddyworks/my-project.git" + }, + { + "ci.pipeline.id": "456/buddy-execution-id", + "ci.pipeline.name": "Deploy to Production", + "ci.pipeline.number": "buddy-execution-id", + "ci.pipeline.url": "https://app.buddy.works/myworkspace/my-project/pipelines/pipeline/456/execution/5d9dc42c422f5a268b389d08", + "ci.provider.name": "buddy", + "git.branch": "master", + "git.commit.committer.email": "mikebenson@buddy.works", + "git.commit.committer.name": "Mike Benson", + "git.commit.message": "Create buddy.yml", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com/buddyworks/my-project.git", + "git.tag": "v1.0" + } + ], + [ + { + "BUDDY": "true", + "BUDDY_EXECUTION_BRANCH": "master", + "BUDDY_EXECUTION_ID": "buddy-execution-id", + "BUDDY_EXECUTION_REVISION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BUDDY_EXECUTION_REVISION_COMMITTER_EMAIL": "mikebenson@buddy.works", + "BUDDY_EXECUTION_REVISION_COMMITTER_NAME": "Mike Benson", + "BUDDY_EXECUTION_REVISION_MESSAGE": "Create buddy.yml", + "BUDDY_EXECUTION_TAG": "v1.0", + "BUDDY_EXECUTION_URL": "https://app.buddy.works/myworkspace/my-project/pipelines/pipeline/456/execution/5d9dc42c422f5a268b389d08", + "BUDDY_PIPELINE_ID": "456", + "BUDDY_PIPELINE_NAME": "Deploy to Production", + "BUDDY_SCM_URL": "https://user:password@github.com:1234/buddyworks/my-project.git" + }, + { + "ci.pipeline.id": "456/buddy-execution-id", + "ci.pipeline.name": "Deploy to Production", + "ci.pipeline.number": "buddy-execution-id", + "ci.pipeline.url": "https://app.buddy.works/myworkspace/my-project/pipelines/pipeline/456/execution/5d9dc42c422f5a268b389d08", + "ci.provider.name": "buddy", + "git.branch": "master", + "git.commit.committer.email": "mikebenson@buddy.works", + "git.commit.committer.name": "Mike Benson", + "git.commit.message": "Create buddy.yml", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com:1234/buddyworks/my-project.git", + "git.tag": "v1.0" + } + ], + [ + { + "BUDDY": "true", + "BUDDY_EXECUTION_BRANCH": "master", + "BUDDY_EXECUTION_ID": "buddy-execution-id", + "BUDDY_EXECUTION_REVISION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BUDDY_EXECUTION_REVISION_COMMITTER_EMAIL": "mikebenson@buddy.works", + "BUDDY_EXECUTION_REVISION_COMMITTER_NAME": "Mike Benson", + "BUDDY_EXECUTION_REVISION_MESSAGE": "Create buddy.yml", + "BUDDY_EXECUTION_TAG": "v1.0", + "BUDDY_EXECUTION_URL": "https://app.buddy.works/myworkspace/my-project/pipelines/pipeline/456/execution/5d9dc42c422f5a268b389d08", + "BUDDY_PIPELINE_ID": "456", + "BUDDY_PIPELINE_NAME": "Deploy to Production", + "BUDDY_SCM_URL": "https://user:password@1.1.1.1/buddyworks/my-project.git" + }, + { + "ci.pipeline.id": "456/buddy-execution-id", + "ci.pipeline.name": "Deploy to Production", + "ci.pipeline.number": "buddy-execution-id", + "ci.pipeline.url": "https://app.buddy.works/myworkspace/my-project/pipelines/pipeline/456/execution/5d9dc42c422f5a268b389d08", + "ci.provider.name": "buddy", + "git.branch": "master", + "git.commit.committer.email": "mikebenson@buddy.works", + "git.commit.committer.name": "Mike Benson", + "git.commit.message": "Create buddy.yml", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://1.1.1.1/buddyworks/my-project.git", + "git.tag": "v1.0" + } + ], + [ + { + "BUDDY": "true", + "BUDDY_EXECUTION_BRANCH": "master", + "BUDDY_EXECUTION_ID": "buddy-execution-id", + "BUDDY_EXECUTION_REVISION": 
"b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BUDDY_EXECUTION_REVISION_COMMITTER_EMAIL": "mikebenson@buddy.works", + "BUDDY_EXECUTION_REVISION_COMMITTER_NAME": "Mike Benson", + "BUDDY_EXECUTION_REVISION_MESSAGE": "Create buddy.yml", + "BUDDY_EXECUTION_TAG": "v1.0", + "BUDDY_EXECUTION_URL": "https://app.buddy.works/myworkspace/my-project/pipelines/pipeline/456/execution/5d9dc42c422f5a268b389d08", + "BUDDY_PIPELINE_ID": "456", + "BUDDY_PIPELINE_NAME": "Deploy to Production", + "BUDDY_SCM_URL": "https://user:password@1.1.1.1:1234/buddyworks/my-project.git" + }, + { + "ci.pipeline.id": "456/buddy-execution-id", + "ci.pipeline.name": "Deploy to Production", + "ci.pipeline.number": "buddy-execution-id", + "ci.pipeline.url": "https://app.buddy.works/myworkspace/my-project/pipelines/pipeline/456/execution/5d9dc42c422f5a268b389d08", + "ci.provider.name": "buddy", + "git.branch": "master", + "git.commit.committer.email": "mikebenson@buddy.works", + "git.commit.committer.name": "Mike Benson", + "git.commit.message": "Create buddy.yml", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://1.1.1.1:1234/buddyworks/my-project.git", + "git.tag": "v1.0" + } + ], + [ + { + "BUDDY": "true", + "BUDDY_EXECUTION_BRANCH": "master", + "BUDDY_EXECUTION_ID": "buddy-execution-id", + "BUDDY_EXECUTION_REVISION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BUDDY_EXECUTION_REVISION_COMMITTER_EMAIL": "mikebenson@buddy.works", + "BUDDY_EXECUTION_REVISION_COMMITTER_NAME": "Mike Benson", + "BUDDY_EXECUTION_REVISION_MESSAGE": "Create buddy.yml", + "BUDDY_EXECUTION_TAG": "v1.0", + "BUDDY_EXECUTION_URL": "https://app.buddy.works/myworkspace/my-project/pipelines/pipeline/456/execution/5d9dc42c422f5a268b389d08", + "BUDDY_PIPELINE_ID": "456", + "BUDDY_PIPELINE_NAME": "Deploy to Production", + "BUDDY_SCM_URL": "https://user:password@1.1.1.1:1234/buddyworks/my-project_with_@_yeah.git" + }, + { + "ci.pipeline.id": "456/buddy-execution-id", + "ci.pipeline.name": "Deploy to Production", + "ci.pipeline.number": "buddy-execution-id", + "ci.pipeline.url": "https://app.buddy.works/myworkspace/my-project/pipelines/pipeline/456/execution/5d9dc42c422f5a268b389d08", + "ci.provider.name": "buddy", + "git.branch": "master", + "git.commit.committer.email": "mikebenson@buddy.works", + "git.commit.committer.name": "Mike Benson", + "git.commit.message": "Create buddy.yml", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://1.1.1.1:1234/buddyworks/my-project_with_@_yeah.git", + "git.tag": "v1.0" + } + ], + [ + { + "BUDDY": "true", + "BUDDY_EXECUTION_BRANCH": "master", + "BUDDY_EXECUTION_ID": "buddy-execution-id", + "BUDDY_EXECUTION_REVISION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BUDDY_EXECUTION_REVISION_COMMITTER_EMAIL": "mikebenson@buddy.works", + "BUDDY_EXECUTION_REVISION_COMMITTER_NAME": "Mike Benson", + "BUDDY_EXECUTION_REVISION_MESSAGE": "Create buddy.yml", + "BUDDY_EXECUTION_TAG": "v1.0", + "BUDDY_EXECUTION_URL": "https://app.buddy.works/myworkspace/my-project/pipelines/pipeline/456/execution/5d9dc42c422f5a268b389d08", + "BUDDY_PIPELINE_ID": "456", + "BUDDY_PIPELINE_NAME": "Deploy to Production", + "BUDDY_SCM_URL": "ssh://user@host.xz:port/path/to/repo.git/" + }, + { + "ci.pipeline.id": "456/buddy-execution-id", + "ci.pipeline.name": "Deploy to Production", + "ci.pipeline.number": "buddy-execution-id", + "ci.pipeline.url": "https://app.buddy.works/myworkspace/my-project/pipelines/pipeline/456/execution/5d9dc42c422f5a268b389d08", + 
"ci.provider.name": "buddy", + "git.branch": "master", + "git.commit.committer.email": "mikebenson@buddy.works", + "git.commit.committer.name": "Mike Benson", + "git.commit.message": "Create buddy.yml", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "ssh://host.xz:port/path/to/repo.git/", + "git.tag": "v1.0" + } + ], + [ + { + "BUDDY": "true", + "BUDDY_EXECUTION_BRANCH": "master", + "BUDDY_EXECUTION_ID": "buddy-execution-id", + "BUDDY_EXECUTION_REVISION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BUDDY_EXECUTION_REVISION_COMMITTER_EMAIL": "mikebenson@buddy.works", + "BUDDY_EXECUTION_REVISION_COMMITTER_NAME": "Mike Benson", + "BUDDY_EXECUTION_REVISION_MESSAGE": "Create buddy.yml", + "BUDDY_EXECUTION_TAG": "v1.0", + "BUDDY_EXECUTION_URL": "https://app.buddy.works/myworkspace/my-project/pipelines/pipeline/456/execution/5d9dc42c422f5a268b389d08", + "BUDDY_PIPELINE_ID": "456", + "BUDDY_PIPELINE_NAME": "Deploy to Production", + "BUDDY_SCM_URL": "ssh://user:password@host.xz:port/path/to/repo.git/" + }, + { + "ci.pipeline.id": "456/buddy-execution-id", + "ci.pipeline.name": "Deploy to Production", + "ci.pipeline.number": "buddy-execution-id", + "ci.pipeline.url": "https://app.buddy.works/myworkspace/my-project/pipelines/pipeline/456/execution/5d9dc42c422f5a268b389d08", + "ci.provider.name": "buddy", + "git.branch": "master", + "git.commit.committer.email": "mikebenson@buddy.works", + "git.commit.committer.name": "Mike Benson", + "git.commit.message": "Create buddy.yml", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "ssh://host.xz:port/path/to/repo.git/", + "git.tag": "v1.0" + } ] ] diff --git a/packages/dd-trace/test/plugins/util/ci-env/buildkite.json b/packages/dd-trace/test/plugins/util/ci-env/buildkite.json index c332fd740d7..421904b20e6 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/buildkite.json +++ b/packages/dd-trace/test/plugins/util/ci-env/buildkite.json @@ -704,6 +704,223 @@ "git.repository_url": "https://github.com/DataDog/dogweb.git" } ], + [ + { + "BUILDKITE": "true", + "BUILDKITE_BRANCH": "", + "BUILDKITE_BUILD_AUTHOR": "buildkite-git-commit-author-name", + "BUILDKITE_BUILD_AUTHOR_EMAIL": "buildkite-git-commit-author-email@datadoghq.com", + "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", + "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BUILDKITE_JOB_ID": "buildkite-job-id", + "BUILDKITE_MESSAGE": "buildkite-git-commit-message", + "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", + "BUILDKITE_REPO": "https://user@github.com/DataDog/dogweb.git", + "BUILDKITE_TAG": "" + }, + { + "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", + "ci.pipeline.id": "buildkite-pipeline-id", + "ci.pipeline.name": "buildkite-pipeline-name", + "ci.pipeline.number": "buildkite-pipeline-number", + "ci.pipeline.url": "https://buildkite-build-url.com", + "ci.provider.name": "buildkite", + "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", + "git.commit.author.name": "buildkite-git-commit-author-name", + "git.commit.message": "buildkite-git-commit-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com/DataDog/dogweb.git" + } + ], + [ + { + "BUILDKITE": "true", 
+ "BUILDKITE_BRANCH": "", + "BUILDKITE_BUILD_AUTHOR": "buildkite-git-commit-author-name", + "BUILDKITE_BUILD_AUTHOR_EMAIL": "buildkite-git-commit-author-email@datadoghq.com", + "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", + "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BUILDKITE_JOB_ID": "buildkite-job-id", + "BUILDKITE_MESSAGE": "buildkite-git-commit-message", + "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", + "BUILDKITE_REPO": "https://user:password@github.com:1234/DataDog/dogweb.git", + "BUILDKITE_TAG": "" + }, + { + "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", + "ci.pipeline.id": "buildkite-pipeline-id", + "ci.pipeline.name": "buildkite-pipeline-name", + "ci.pipeline.number": "buildkite-pipeline-number", + "ci.pipeline.url": "https://buildkite-build-url.com", + "ci.provider.name": "buildkite", + "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", + "git.commit.author.name": "buildkite-git-commit-author-name", + "git.commit.message": "buildkite-git-commit-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com:1234/DataDog/dogweb.git" + } + ], + [ + { + "BUILDKITE": "true", + "BUILDKITE_BRANCH": "", + "BUILDKITE_BUILD_AUTHOR": "buildkite-git-commit-author-name", + "BUILDKITE_BUILD_AUTHOR_EMAIL": "buildkite-git-commit-author-email@datadoghq.com", + "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", + "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BUILDKITE_JOB_ID": "buildkite-job-id", + "BUILDKITE_MESSAGE": "buildkite-git-commit-message", + "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", + "BUILDKITE_REPO": "https://user:password@1.1.1.1/DataDog/dogweb.git", + "BUILDKITE_TAG": "" + }, + { + "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", + "ci.pipeline.id": "buildkite-pipeline-id", + "ci.pipeline.name": "buildkite-pipeline-name", + "ci.pipeline.number": "buildkite-pipeline-number", + "ci.pipeline.url": "https://buildkite-build-url.com", + "ci.provider.name": "buildkite", + "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", + "git.commit.author.name": "buildkite-git-commit-author-name", + "git.commit.message": "buildkite-git-commit-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://1.1.1.1/DataDog/dogweb.git" + } + ], + [ + { + "BUILDKITE": "true", + "BUILDKITE_BRANCH": "", + "BUILDKITE_BUILD_AUTHOR": "buildkite-git-commit-author-name", + "BUILDKITE_BUILD_AUTHOR_EMAIL": "buildkite-git-commit-author-email@datadoghq.com", + "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", + "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BUILDKITE_JOB_ID": "buildkite-job-id", + "BUILDKITE_MESSAGE": "buildkite-git-commit-message", + "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", + "BUILDKITE_REPO": "https://user:password@1.1.1.1:1234/DataDog/dogweb.git", 
+ "BUILDKITE_TAG": "" + }, + { + "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", + "ci.pipeline.id": "buildkite-pipeline-id", + "ci.pipeline.name": "buildkite-pipeline-name", + "ci.pipeline.number": "buildkite-pipeline-number", + "ci.pipeline.url": "https://buildkite-build-url.com", + "ci.provider.name": "buildkite", + "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", + "git.commit.author.name": "buildkite-git-commit-author-name", + "git.commit.message": "buildkite-git-commit-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://1.1.1.1:1234/DataDog/dogweb.git" + } + ], + [ + { + "BUILDKITE": "true", + "BUILDKITE_BRANCH": "", + "BUILDKITE_BUILD_AUTHOR": "buildkite-git-commit-author-name", + "BUILDKITE_BUILD_AUTHOR_EMAIL": "buildkite-git-commit-author-email@datadoghq.com", + "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", + "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BUILDKITE_JOB_ID": "buildkite-job-id", + "BUILDKITE_MESSAGE": "buildkite-git-commit-message", + "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", + "BUILDKITE_REPO": "https://user:password@1.1.1.1:1234/DataDog/dogweb_with_@_yeah.git", + "BUILDKITE_TAG": "" + }, + { + "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", + "ci.pipeline.id": "buildkite-pipeline-id", + "ci.pipeline.name": "buildkite-pipeline-name", + "ci.pipeline.number": "buildkite-pipeline-number", + "ci.pipeline.url": "https://buildkite-build-url.com", + "ci.provider.name": "buildkite", + "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", + "git.commit.author.name": "buildkite-git-commit-author-name", + "git.commit.message": "buildkite-git-commit-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://1.1.1.1:1234/DataDog/dogweb_with_@_yeah.git" + } + ], + [ + { + "BUILDKITE": "true", + "BUILDKITE_BRANCH": "", + "BUILDKITE_BUILD_AUTHOR": "buildkite-git-commit-author-name", + "BUILDKITE_BUILD_AUTHOR_EMAIL": "buildkite-git-commit-author-email@datadoghq.com", + "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", + "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BUILDKITE_JOB_ID": "buildkite-job-id", + "BUILDKITE_MESSAGE": "buildkite-git-commit-message", + "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", + "BUILDKITE_REPO": "ssh://user@host.xz:port/path/to/repo.git/", + "BUILDKITE_TAG": "" + }, + { + "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", + "ci.pipeline.id": "buildkite-pipeline-id", + "ci.pipeline.name": "buildkite-pipeline-name", + "ci.pipeline.number": "buildkite-pipeline-number", + "ci.pipeline.url": "https://buildkite-build-url.com", + "ci.provider.name": "buildkite", + "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", + "git.commit.author.name": "buildkite-git-commit-author-name", + 
"git.commit.message": "buildkite-git-commit-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "ssh://host.xz:port/path/to/repo.git/" + } + ], + [ + { + "BUILDKITE": "true", + "BUILDKITE_BRANCH": "", + "BUILDKITE_BUILD_AUTHOR": "buildkite-git-commit-author-name", + "BUILDKITE_BUILD_AUTHOR_EMAIL": "buildkite-git-commit-author-email@datadoghq.com", + "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", + "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BUILDKITE_JOB_ID": "buildkite-job-id", + "BUILDKITE_MESSAGE": "buildkite-git-commit-message", + "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", + "BUILDKITE_REPO": "ssh://user:password@host.xz:port/path/to/repo.git/", + "BUILDKITE_TAG": "" + }, + { + "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", + "ci.pipeline.id": "buildkite-pipeline-id", + "ci.pipeline.name": "buildkite-pipeline-name", + "ci.pipeline.number": "buildkite-pipeline-number", + "ci.pipeline.url": "https://buildkite-build-url.com", + "ci.provider.name": "buildkite", + "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", + "git.commit.author.name": "buildkite-git-commit-author-name", + "git.commit.message": "buildkite-git-commit-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "ssh://host.xz:port/path/to/repo.git/" + } + ], [ { "BUILDKITE": "true", diff --git a/packages/dd-trace/test/plugins/util/ci-env/circleci.json b/packages/dd-trace/test/plugins/util/ci-env/circleci.json index 8efa8c353f0..b9065be3bd6 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/circleci.json +++ b/packages/dd-trace/test/plugins/util/ci-env/circleci.json @@ -541,5 +541,166 @@ "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/DataDog/dogweb.git" } + ], + [ + { + "CIRCLECI": "circleCI", + "CIRCLE_BUILD_NUM": "circleci-pipeline-number", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", + "CIRCLE_JOB": "circleci-job-name", + "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", + "CIRCLE_REPOSITORY_URL": "https://user@github.com/DataDog/dogweb.git", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id" + }, + { + "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", + "ci.job.name": "circleci-job-name", + "ci.job.url": "https://circleci-build-url.com/", + "ci.pipeline.id": "circleci-pipeline-id", + "ci.pipeline.name": "circleci-pipeline-name", + "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", + "ci.provider.name": "circleci", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com/DataDog/dogweb.git" + } + ], + [ + { + "CIRCLECI": "circleCI", + "CIRCLE_BUILD_NUM": "circleci-pipeline-number", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", + "CIRCLE_JOB": "circleci-job-name", + "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", + "CIRCLE_REPOSITORY_URL": "https://user:password@github.com:1234/DataDog/dogweb.git", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id" + }, + { + 
"_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", + "ci.job.name": "circleci-job-name", + "ci.job.url": "https://circleci-build-url.com/", + "ci.pipeline.id": "circleci-pipeline-id", + "ci.pipeline.name": "circleci-pipeline-name", + "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", + "ci.provider.name": "circleci", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com:1234/DataDog/dogweb.git" + } + ], + [ + { + "CIRCLECI": "circleCI", + "CIRCLE_BUILD_NUM": "circleci-pipeline-number", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", + "CIRCLE_JOB": "circleci-job-name", + "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", + "CIRCLE_REPOSITORY_URL": "https://user:password@1.1.1.1/DataDog/dogweb.git", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id" + }, + { + "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", + "ci.job.name": "circleci-job-name", + "ci.job.url": "https://circleci-build-url.com/", + "ci.pipeline.id": "circleci-pipeline-id", + "ci.pipeline.name": "circleci-pipeline-name", + "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", + "ci.provider.name": "circleci", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://1.1.1.1/DataDog/dogweb.git" + } + ], + [ + { + "CIRCLECI": "circleCI", + "CIRCLE_BUILD_NUM": "circleci-pipeline-number", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", + "CIRCLE_JOB": "circleci-job-name", + "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", + "CIRCLE_REPOSITORY_URL": "https://user:password@1.1.1.1:1234/DataDog/dogweb.git", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id" + }, + { + "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", + "ci.job.name": "circleci-job-name", + "ci.job.url": "https://circleci-build-url.com/", + "ci.pipeline.id": "circleci-pipeline-id", + "ci.pipeline.name": "circleci-pipeline-name", + "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", + "ci.provider.name": "circleci", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://1.1.1.1:1234/DataDog/dogweb.git" + } + ], + [ + { + "CIRCLECI": "circleCI", + "CIRCLE_BUILD_NUM": "circleci-pipeline-number", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", + "CIRCLE_JOB": "circleci-job-name", + "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", + "CIRCLE_REPOSITORY_URL": "https://user:password@1.1.1.1:1234/DataDog/dogweb_with_@_yeah.git", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id" + }, + { + "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", + "ci.job.name": "circleci-job-name", + "ci.job.url": "https://circleci-build-url.com/", + "ci.pipeline.id": "circleci-pipeline-id", + "ci.pipeline.name": "circleci-pipeline-name", + "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", + "ci.provider.name": "circleci", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": 
"https://1.1.1.1:1234/DataDog/dogweb_with_@_yeah.git" + } + ], + [ + { + "CIRCLECI": "circleCI", + "CIRCLE_BUILD_NUM": "circleci-pipeline-number", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", + "CIRCLE_JOB": "circleci-job-name", + "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", + "CIRCLE_REPOSITORY_URL": "ssh://user@host.xz:port/path/to/repo.git/", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id" + }, + { + "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", + "ci.job.name": "circleci-job-name", + "ci.job.url": "https://circleci-build-url.com/", + "ci.pipeline.id": "circleci-pipeline-id", + "ci.pipeline.name": "circleci-pipeline-name", + "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", + "ci.provider.name": "circleci", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "ssh://host.xz:port/path/to/repo.git/" + } + ], + [ + { + "CIRCLECI": "circleCI", + "CIRCLE_BUILD_NUM": "circleci-pipeline-number", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", + "CIRCLE_JOB": "circleci-job-name", + "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", + "CIRCLE_REPOSITORY_URL": "ssh://user:password@host.xz:port/path/to/repo.git/", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id" + }, + { + "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", + "ci.job.name": "circleci-job-name", + "ci.job.url": "https://circleci-build-url.com/", + "ci.pipeline.id": "circleci-pipeline-id", + "ci.pipeline.name": "circleci-pipeline-name", + "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", + "ci.provider.name": "circleci", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "ssh://host.xz:port/path/to/repo.git/" + } ] ] diff --git a/packages/dd-trace/test/plugins/util/ci-env/codefresh.json b/packages/dd-trace/test/plugins/util/ci-env/codefresh.json index d719df10592..7b1367b4f09 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/codefresh.json +++ b/packages/dd-trace/test/plugins/util/ci-env/codefresh.json @@ -158,5 +158,167 @@ "git.repository_url": "git@github.com:DataDog/userrepo.git", "git.tag": "0.0.2" } + ], + [ + { + "CF_BUILD_ID": "6410367cee516146a4c4c66e", + "CF_BUILD_URL": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", + "CF_PIPELINE_NAME": "My simple project/Example Java Project Pipeline", + "CF_STEP_NAME": "mah-job-name", + "DD_GIT_REPOSITORY_URL": "https://user:password@github.com/DataDog/dogweb.git" + }, + { + "_dd.ci.env_vars": "{\"CF_BUILD_ID\":\"6410367cee516146a4c4c66e\"}", + "ci.job.name": "mah-job-name", + "ci.pipeline.id": "6410367cee516146a4c4c66e", + "ci.pipeline.name": "My simple project/Example Java Project Pipeline", + "ci.pipeline.url": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", + "ci.provider.name": "codefresh", + "git.repository_url": "https://github.com/DataDog/dogweb.git" + } + ], + [ + { + "CF_BUILD_ID": "6410367cee516146a4c4c66e", + "CF_BUILD_URL": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", + "CF_PIPELINE_NAME": "My simple project/Example Java Project Pipeline", + "CF_STEP_NAME": "mah-job-name", + "DD_GIT_REPOSITORY_URL": "https://user@github.com/DataDog/dogweb.git" + }, + { + "_dd.ci.env_vars": 
"{\"CF_BUILD_ID\":\"6410367cee516146a4c4c66e\"}", + "ci.job.name": "mah-job-name", + "ci.pipeline.id": "6410367cee516146a4c4c66e", + "ci.pipeline.name": "My simple project/Example Java Project Pipeline", + "ci.pipeline.url": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", + "ci.provider.name": "codefresh", + "git.repository_url": "https://github.com/DataDog/dogweb.git" + } + ], + [ + { + "CF_BUILD_ID": "6410367cee516146a4c4c66e", + "CF_BUILD_URL": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", + "CF_PIPELINE_NAME": "My simple project/Example Java Project Pipeline", + "CF_STEP_NAME": "mah-job-name", + "DD_GIT_REPOSITORY_URL": "https://user:password@github.com:1234/DataDog/dogweb.git" + }, + { + "_dd.ci.env_vars": "{\"CF_BUILD_ID\":\"6410367cee516146a4c4c66e\"}", + "ci.job.name": "mah-job-name", + "ci.pipeline.id": "6410367cee516146a4c4c66e", + "ci.pipeline.name": "My simple project/Example Java Project Pipeline", + "ci.pipeline.url": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", + "ci.provider.name": "codefresh", + "git.repository_url": "https://github.com:1234/DataDog/dogweb.git" + } + ], + [ + { + "CF_BUILD_ID": "6410367cee516146a4c4c66e", + "CF_BUILD_URL": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", + "CF_PIPELINE_NAME": "My simple project/Example Java Project Pipeline", + "CF_STEP_NAME": "mah-job-name", + "DD_GIT_REPOSITORY_URL": "https://user:password@1.1.1.1/DataDog/dogweb.git" + }, + { + "_dd.ci.env_vars": "{\"CF_BUILD_ID\":\"6410367cee516146a4c4c66e\"}", + "ci.job.name": "mah-job-name", + "ci.pipeline.id": "6410367cee516146a4c4c66e", + "ci.pipeline.name": "My simple project/Example Java Project Pipeline", + "ci.pipeline.url": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", + "ci.provider.name": "codefresh", + "git.repository_url": "https://1.1.1.1/DataDog/dogweb.git" + } + ], + [ + { + "CF_BUILD_ID": "6410367cee516146a4c4c66e", + "CF_BUILD_URL": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", + "CF_PIPELINE_NAME": "My simple project/Example Java Project Pipeline", + "CF_STEP_NAME": "mah-job-name", + "DD_GIT_REPOSITORY_URL": "https://user:password@1.1.1.1/DataDog/dogweb.git" + }, + { + "_dd.ci.env_vars": "{\"CF_BUILD_ID\":\"6410367cee516146a4c4c66e\"}", + "ci.job.name": "mah-job-name", + "ci.pipeline.id": "6410367cee516146a4c4c66e", + "ci.pipeline.name": "My simple project/Example Java Project Pipeline", + "ci.pipeline.url": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", + "ci.provider.name": "codefresh", + "git.repository_url": "https://1.1.1.1/DataDog/dogweb.git" + } + ], + [ + { + "CF_BUILD_ID": "6410367cee516146a4c4c66e", + "CF_BUILD_URL": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", + "CF_PIPELINE_NAME": "My simple project/Example Java Project Pipeline", + "CF_STEP_NAME": "mah-job-name", + "DD_GIT_REPOSITORY_URL": "https://user:password@1.1.1.1:1234/DataDog/dogweb.git" + }, + { + "_dd.ci.env_vars": "{\"CF_BUILD_ID\":\"6410367cee516146a4c4c66e\"}", + "ci.job.name": "mah-job-name", + "ci.pipeline.id": "6410367cee516146a4c4c66e", + "ci.pipeline.name": "My simple project/Example Java Project Pipeline", + "ci.pipeline.url": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", + "ci.provider.name": "codefresh", + "git.repository_url": "https://1.1.1.1:1234/DataDog/dogweb.git" + } + ], + [ + { + "CF_BUILD_ID": "6410367cee516146a4c4c66e", + "CF_BUILD_URL": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", + "CF_PIPELINE_NAME": "My simple project/Example Java Project Pipeline", + "CF_STEP_NAME": 
"mah-job-name", + "DD_GIT_REPOSITORY_URL": "https://user:password@1.1.1.1:1234/DataDog/dogweb_with_@_yeah.git" + }, + { + "_dd.ci.env_vars": "{\"CF_BUILD_ID\":\"6410367cee516146a4c4c66e\"}", + "ci.job.name": "mah-job-name", + "ci.pipeline.id": "6410367cee516146a4c4c66e", + "ci.pipeline.name": "My simple project/Example Java Project Pipeline", + "ci.pipeline.url": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", + "ci.provider.name": "codefresh", + "git.repository_url": "https://1.1.1.1:1234/DataDog/dogweb_with_@_yeah.git" + } + ], + [ + { + "CF_BUILD_ID": "6410367cee516146a4c4c66e", + "CF_BUILD_URL": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", + "CF_PIPELINE_NAME": "My simple project/Example Java Project Pipeline", + "CF_STEP_NAME": "mah-job-name", + "DD_GIT_REPOSITORY_URL": "ssh://user@host.xz:port/path/to/repo.git/" + }, + { + "_dd.ci.env_vars": "{\"CF_BUILD_ID\":\"6410367cee516146a4c4c66e\"}", + "ci.job.name": "mah-job-name", + "ci.pipeline.id": "6410367cee516146a4c4c66e", + "ci.pipeline.name": "My simple project/Example Java Project Pipeline", + "ci.pipeline.url": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", + "ci.provider.name": "codefresh", + "git.repository_url": "ssh://host.xz:port/path/to/repo.git/" + } + ], + [ + { + "CF_BUILD_ID": "6410367cee516146a4c4c66e", + "CF_BUILD_URL": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", + "CF_PIPELINE_NAME": "My simple project/Example Java Project Pipeline", + "CF_STEP_NAME": "mah-job-name", + "DD_GIT_REPOSITORY_URL": "ssh://user:password@host.xz:port/path/to/repo.git/" + }, + { + "_dd.ci.env_vars": "{\"CF_BUILD_ID\":\"6410367cee516146a4c4c66e\"}", + "ci.job.name": "mah-job-name", + "ci.pipeline.id": "6410367cee516146a4c4c66e", + "ci.pipeline.name": "My simple project/Example Java Project Pipeline", + "ci.pipeline.url": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", + "ci.provider.name": "codefresh", + "git.repository_url": "ssh://host.xz:port/path/to/repo.git/" + } ] ] diff --git a/packages/dd-trace/test/plugins/util/ci-env/github.json b/packages/dd-trace/test/plugins/util/ci-env/github.json index e5df52c58ba..3dd5ac62d54 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/github.json +++ b/packages/dd-trace/test/plugins/util/ci-env/github.json @@ -5,6 +5,7 @@ "GITHUB_JOB": "github-job-name", "GITHUB_REF": "master", "GITHUB_REPOSITORY": "ghactions-repo", + "GITHUB_RUN_ATTEMPT": "ghactions-run-attempt", "GITHUB_RUN_ID": "ghactions-pipeline-id", "GITHUB_RUN_NUMBER": "ghactions-pipeline-number", "GITHUB_SERVER_URL": "https://ghenterprise.com", @@ -13,13 +14,13 @@ "GITHUB_WORKSPACE": "/foo/bar" }, { - "_dd.ci.env_vars": "{\"GITHUB_SERVER_URL\":\"https://ghenterprise.com\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\"}", + "_dd.ci.env_vars": "{\"GITHUB_SERVER_URL\":\"https://ghenterprise.com\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\",\"GITHUB_RUN_ATTEMPT\":\"ghactions-run-attempt\"}", "ci.job.name": "github-job-name", "ci.job.url": "https://ghenterprise.com/ghactions-repo/commit/b9f0fb3fdbb94c9d24b2c75b49663122a529e123/checks", "ci.pipeline.id": "ghactions-pipeline-id", "ci.pipeline.name": "ghactions-pipeline-name", "ci.pipeline.number": "ghactions-pipeline-number", - "ci.pipeline.url": "https://ghenterprise.com/ghactions-repo/actions/runs/ghactions-pipeline-id", + "ci.pipeline.url": "https://ghenterprise.com/ghactions-repo/actions/runs/ghactions-pipeline-id/attempts/ghactions-run-attempt", "ci.provider.name": 
"github", "ci.workspace_path": "/foo/bar", "git.branch": "master", @@ -560,5 +561,154 @@ "git.repository_url": "git@github.com:DataDog/userrepo.git", "git.tag": "0.0.2" } + ], + [ + { + "GITHUB_ACTION": "run", + "GITHUB_JOB": "github-job-name", + "GITHUB_REPOSITORY": "ghactions-repo", + "GITHUB_RUN_ID": "ghactions-pipeline-id", + "GITHUB_RUN_NUMBER": "ghactions-pipeline-number", + "GITHUB_SERVER_URL": "https://github.com", + "GITHUB_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GITHUB_WORKFLOW": "ghactions-pipeline-name" + }, + { + "_dd.ci.env_vars": "{\"GITHUB_SERVER_URL\":\"https://github.com\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\"}", + "ci.job.name": "github-job-name", + "ci.job.url": "https://github.com/ghactions-repo/commit/b9f0fb3fdbb94c9d24b2c75b49663122a529e123/checks", + "ci.pipeline.id": "ghactions-pipeline-id", + "ci.pipeline.name": "ghactions-pipeline-name", + "ci.pipeline.number": "ghactions-pipeline-number", + "ci.pipeline.url": "https://github.com/ghactions-repo/actions/runs/ghactions-pipeline-id", + "ci.provider.name": "github", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com/ghactions-repo.git" + } + ], + [ + { + "GITHUB_ACTION": "run", + "GITHUB_JOB": "github-job-name", + "GITHUB_REPOSITORY": "ghactions-repo", + "GITHUB_RUN_ATTEMPT": "ghactions-run-attempt", + "GITHUB_RUN_ID": "ghactions-pipeline-id", + "GITHUB_RUN_NUMBER": "ghactions-pipeline-number", + "GITHUB_SERVER_URL": "https://user:password@github.com", + "GITHUB_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GITHUB_WORKFLOW": "ghactions-pipeline-name" + }, + { + "_dd.ci.env_vars": "{\"GITHUB_SERVER_URL\":\"https://github.com\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\",\"GITHUB_RUN_ATTEMPT\":\"ghactions-run-attempt\"}", + "ci.job.name": "github-job-name", + "ci.job.url": "https://github.com/ghactions-repo/commit/b9f0fb3fdbb94c9d24b2c75b49663122a529e123/checks", + "ci.pipeline.id": "ghactions-pipeline-id", + "ci.pipeline.name": "ghactions-pipeline-name", + "ci.pipeline.number": "ghactions-pipeline-number", + "ci.pipeline.url": "https://github.com/ghactions-repo/actions/runs/ghactions-pipeline-id/attempts/ghactions-run-attempt", + "ci.provider.name": "github", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com/ghactions-repo.git" + } + ], + [ + { + "GITHUB_ACTION": "run", + "GITHUB_JOB": "github-job-name", + "GITHUB_REPOSITORY": "ghactions-repo", + "GITHUB_RUN_ATTEMPT": "ghactions-run-attempt", + "GITHUB_RUN_ID": "ghactions-pipeline-id", + "GITHUB_RUN_NUMBER": "ghactions-pipeline-number", + "GITHUB_SERVER_URL": "https://user@github.com", + "GITHUB_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GITHUB_WORKFLOW": "ghactions-pipeline-name" + }, + { + "_dd.ci.env_vars": "{\"GITHUB_SERVER_URL\":\"https://github.com\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\",\"GITHUB_RUN_ATTEMPT\":\"ghactions-run-attempt\"}", + "ci.job.name": "github-job-name", + "ci.job.url": "https://github.com/ghactions-repo/commit/b9f0fb3fdbb94c9d24b2c75b49663122a529e123/checks", + "ci.pipeline.id": "ghactions-pipeline-id", + "ci.pipeline.name": "ghactions-pipeline-name", + "ci.pipeline.number": "ghactions-pipeline-number", + "ci.pipeline.url": "https://github.com/ghactions-repo/actions/runs/ghactions-pipeline-id/attempts/ghactions-run-attempt", + "ci.provider.name": "github", + 
"git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com/ghactions-repo.git" + } + ], + [ + { + "GITHUB_ACTION": "run", + "GITHUB_JOB": "github-job-name", + "GITHUB_REPOSITORY": "ghactions-repo", + "GITHUB_RUN_ATTEMPT": "ghactions-run-attempt", + "GITHUB_RUN_ID": "ghactions-pipeline-id", + "GITHUB_RUN_NUMBER": "ghactions-pipeline-number", + "GITHUB_SERVER_URL": "https://user:password@github.com:1234", + "GITHUB_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GITHUB_WORKFLOW": "ghactions-pipeline-name" + }, + { + "_dd.ci.env_vars": "{\"GITHUB_SERVER_URL\":\"https://github.com:1234\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\",\"GITHUB_RUN_ATTEMPT\":\"ghactions-run-attempt\"}", + "ci.job.name": "github-job-name", + "ci.job.url": "https://github.com:1234/ghactions-repo/commit/b9f0fb3fdbb94c9d24b2c75b49663122a529e123/checks", + "ci.pipeline.id": "ghactions-pipeline-id", + "ci.pipeline.name": "ghactions-pipeline-name", + "ci.pipeline.number": "ghactions-pipeline-number", + "ci.pipeline.url": "https://github.com:1234/ghactions-repo/actions/runs/ghactions-pipeline-id/attempts/ghactions-run-attempt", + "ci.provider.name": "github", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com:1234/ghactions-repo.git" + } + ], + [ + { + "GITHUB_ACTION": "run", + "GITHUB_JOB": "github-job-name", + "GITHUB_REPOSITORY": "ghactions-repo", + "GITHUB_RUN_ATTEMPT": "ghactions-run-attempt", + "GITHUB_RUN_ID": "ghactions-pipeline-id", + "GITHUB_RUN_NUMBER": "ghactions-pipeline-number", + "GITHUB_SERVER_URL": "https://user:password@1.1.1.1", + "GITHUB_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GITHUB_WORKFLOW": "ghactions-pipeline-name" + }, + { + "_dd.ci.env_vars": "{\"GITHUB_SERVER_URL\":\"https://1.1.1.1\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\",\"GITHUB_RUN_ATTEMPT\":\"ghactions-run-attempt\"}", + "ci.job.name": "github-job-name", + "ci.job.url": "https://1.1.1.1/ghactions-repo/commit/b9f0fb3fdbb94c9d24b2c75b49663122a529e123/checks", + "ci.pipeline.id": "ghactions-pipeline-id", + "ci.pipeline.name": "ghactions-pipeline-name", + "ci.pipeline.number": "ghactions-pipeline-number", + "ci.pipeline.url": "https://1.1.1.1/ghactions-repo/actions/runs/ghactions-pipeline-id/attempts/ghactions-run-attempt", + "ci.provider.name": "github", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://1.1.1.1/ghactions-repo.git" + } + ], + [ + { + "GITHUB_ACTION": "run", + "GITHUB_JOB": "github-job-name", + "GITHUB_REPOSITORY": "ghactions-repo", + "GITHUB_RUN_ATTEMPT": "ghactions-run-attempt", + "GITHUB_RUN_ID": "ghactions-pipeline-id", + "GITHUB_RUN_NUMBER": "ghactions-pipeline-number", + "GITHUB_SERVER_URL": "https://user:password@1.1.1.1:1234", + "GITHUB_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GITHUB_WORKFLOW": "ghactions-pipeline-name" + }, + { + "_dd.ci.env_vars": "{\"GITHUB_SERVER_URL\":\"https://1.1.1.1:1234\",\"GITHUB_REPOSITORY\":\"ghactions-repo\",\"GITHUB_RUN_ID\":\"ghactions-pipeline-id\",\"GITHUB_RUN_ATTEMPT\":\"ghactions-run-attempt\"}", + "ci.job.name": "github-job-name", + "ci.job.url": "https://1.1.1.1:1234/ghactions-repo/commit/b9f0fb3fdbb94c9d24b2c75b49663122a529e123/checks", + "ci.pipeline.id": "ghactions-pipeline-id", + "ci.pipeline.name": "ghactions-pipeline-name", + "ci.pipeline.number": "ghactions-pipeline-number", + "ci.pipeline.url": 
"https://1.1.1.1:1234/ghactions-repo/actions/runs/ghactions-pipeline-id/attempts/ghactions-run-attempt", + "ci.provider.name": "github", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://1.1.1.1:1234/ghactions-repo.git" + } ] ] diff --git a/packages/dd-trace/test/plugins/util/ci-env/gitlab.json b/packages/dd-trace/test/plugins/util/ci-env/gitlab.json index c1879ed80bd..400d99c977d 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/gitlab.json +++ b/packages/dd-trace/test/plugins/util/ci-env/gitlab.json @@ -668,17 +668,7 @@ "CI_PROJECT_DIR": "/foo/bar", "CI_PROJECT_PATH": "gitlab-pipeline-name", "CI_PROJECT_URL": "https://gitlab.com/repo", - "CI_REPOSITORY_URL": "https://gitlab.com/repo/myrepo.git", - "DD_GIT_BRANCH": "user-supplied-branch", - "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", - "DD_GIT_COMMIT_AUTHOR_EMAIL": "usersupplied-authoremail", - "DD_GIT_COMMIT_AUTHOR_NAME": "usersupplied-authorname", - "DD_GIT_COMMIT_COMMITTER_DATE": "usersupplied-comitterdate", - "DD_GIT_COMMIT_COMMITTER_EMAIL": "usersupplied-comitteremail", - "DD_GIT_COMMIT_COMMITTER_NAME": "usersupplied-comittername", - "DD_GIT_COMMIT_MESSAGE": "usersupplied-message", - "DD_GIT_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", - "DD_GIT_REPOSITORY_URL": "git@github.com:DataDog/userrepo.git", + "CI_REPOSITORY_URL": "http://user:pwd@hostname.com:1234/repo.git", "GITLAB_CI": "gitlab" }, { @@ -692,16 +682,133 @@ "ci.provider.name": "gitlab", "ci.stage.name": "gitlab-stage-name", "ci.workspace_path": "/foo/bar", - "git.branch": "user-supplied-branch", - "git.commit.author.date": "usersupplied-authordate", - "git.commit.author.email": "usersupplied-authoremail", - "git.commit.author.name": "usersupplied-authorname", - "git.commit.committer.date": "usersupplied-comitterdate", - "git.commit.committer.email": "usersupplied-comitteremail", - "git.commit.committer.name": "usersupplied-comittername", - "git.commit.message": "usersupplied-message", + "git.branch": "master", + "git.commit.author.date": "2021-07-21T11:43:07-04:00", + "git.commit.author.email": "john@doe.com", + "git.commit.author.name": "John Doe", + "git.commit.message": "gitlab-git-commit-message", "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", - "git.repository_url": "git@github.com:DataDog/userrepo.git" + "git.repository_url": "http://hostname.com:1234/repo.git" + } + ], + [ + { + "CI_COMMIT_AUTHOR": "John Doe ", + "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", + "CI_COMMIT_REF_NAME": "origin/master", + "CI_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", + "CI_JOB_ID": "gitlab-job-id", + "CI_JOB_NAME": "gitlab-job-name", + "CI_JOB_STAGE": "gitlab-stage-name", + "CI_JOB_URL": "https://gitlab.com/job", + "CI_PIPELINE_ID": "gitlab-pipeline-id", + "CI_PIPELINE_IID": "gitlab-pipeline-number", + "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", + "CI_PROJECT_DIR": "/foo/bar", + "CI_PROJECT_PATH": "gitlab-pipeline-name", + "CI_PROJECT_URL": "https://gitlab.com/repo", + "CI_REPOSITORY_URL": "http://user:pwd@1.1.1.1/repo.git", + "GITLAB_CI": "gitlab" + }, + { + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://gitlab.com/repo\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", + "ci.job.name": "gitlab-job-name", + "ci.job.url": "https://gitlab.com/job", + "ci.pipeline.id": "gitlab-pipeline-id", + "ci.pipeline.name": "gitlab-pipeline-name", + "ci.pipeline.number": "gitlab-pipeline-number", + 
"ci.pipeline.url": "https://foo/repo/-/pipelines/1234", + "ci.provider.name": "gitlab", + "ci.stage.name": "gitlab-stage-name", + "ci.workspace_path": "/foo/bar", + "git.branch": "master", + "git.commit.author.date": "2021-07-21T11:43:07-04:00", + "git.commit.author.email": "john@doe.com", + "git.commit.author.name": "John Doe", + "git.commit.message": "gitlab-git-commit-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "http://1.1.1.1/repo.git" + } + ], + [ + { + "CI_COMMIT_AUTHOR": "John Doe ", + "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", + "CI_COMMIT_REF_NAME": "origin/master", + "CI_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", + "CI_JOB_ID": "gitlab-job-id", + "CI_JOB_NAME": "gitlab-job-name", + "CI_JOB_STAGE": "gitlab-stage-name", + "CI_JOB_URL": "https://gitlab.com/job", + "CI_PIPELINE_ID": "gitlab-pipeline-id", + "CI_PIPELINE_IID": "gitlab-pipeline-number", + "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", + "CI_PROJECT_DIR": "/foo/bar", + "CI_PROJECT_PATH": "gitlab-pipeline-name", + "CI_PROJECT_URL": "https://gitlab.com/repo", + "CI_REPOSITORY_URL": "http://user:pwd@1.1.1.1:1234/repo.git", + "GITLAB_CI": "gitlab" + }, + { + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://gitlab.com/repo\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", + "ci.job.name": "gitlab-job-name", + "ci.job.url": "https://gitlab.com/job", + "ci.pipeline.id": "gitlab-pipeline-id", + "ci.pipeline.name": "gitlab-pipeline-name", + "ci.pipeline.number": "gitlab-pipeline-number", + "ci.pipeline.url": "https://foo/repo/-/pipelines/1234", + "ci.provider.name": "gitlab", + "ci.stage.name": "gitlab-stage-name", + "ci.workspace_path": "/foo/bar", + "git.branch": "master", + "git.commit.author.date": "2021-07-21T11:43:07-04:00", + "git.commit.author.email": "john@doe.com", + "git.commit.author.name": "John Doe", + "git.commit.message": "gitlab-git-commit-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "http://1.1.1.1:1234/repo.git" + } + ], + [ + { + "CI_COMMIT_AUTHOR": "John Doe ", + "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", + "CI_COMMIT_REF_NAME": "origin/master", + "CI_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", + "CI_JOB_ID": "gitlab-job-id", + "CI_JOB_NAME": "gitlab-job-name", + "CI_JOB_STAGE": "gitlab-stage-name", + "CI_JOB_URL": "https://gitlab.com/job", + "CI_PIPELINE_ID": "gitlab-pipeline-id", + "CI_PIPELINE_IID": "gitlab-pipeline-number", + "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", + "CI_PROJECT_DIR": "/foo/bar", + "CI_PROJECT_PATH": "gitlab-pipeline-name", + "CI_PROJECT_URL": "https://gitlab.com/repo", + "CI_REPOSITORY_URL": "http://user:pwd@1.1.1.1:1234/repo_with_@_yeah.git", + "GITLAB_CI": "gitlab" + }, + { + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://gitlab.com/repo\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", + "ci.job.name": "gitlab-job-name", + "ci.job.url": "https://gitlab.com/job", + "ci.pipeline.id": "gitlab-pipeline-id", + "ci.pipeline.name": "gitlab-pipeline-name", + "ci.pipeline.number": "gitlab-pipeline-number", + "ci.pipeline.url": "https://foo/repo/-/pipelines/1234", + "ci.provider.name": "gitlab", + "ci.stage.name": "gitlab-stage-name", + "ci.workspace_path": "/foo/bar", + "git.branch": "master", + "git.commit.author.date": "2021-07-21T11:43:07-04:00", + 
"git.commit.author.email": "john@doe.com", + "git.commit.author.name": "John Doe", + "git.commit.message": "gitlab-git-commit-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "http://1.1.1.1:1234/repo_with_@_yeah.git" } ], [ @@ -722,6 +829,7 @@ "CI_PROJECT_PATH": "gitlab-pipeline-name", "CI_PROJECT_URL": "https://gitlab.com/repo", "CI_REPOSITORY_URL": "https://gitlab.com/repo/myrepo.git", + "DD_GIT_BRANCH": "user-supplied-branch", "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", "DD_GIT_COMMIT_AUTHOR_EMAIL": "usersupplied-authoremail", "DD_GIT_COMMIT_AUTHOR_NAME": "usersupplied-authorname", @@ -731,7 +839,6 @@ "DD_GIT_COMMIT_MESSAGE": "usersupplied-message", "DD_GIT_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "DD_GIT_REPOSITORY_URL": "git@github.com:DataDog/userrepo.git", - "DD_GIT_TAG": "0.0.2", "GITLAB_CI": "gitlab" }, { @@ -745,7 +852,7 @@ "ci.provider.name": "gitlab", "ci.stage.name": "gitlab-stage-name", "ci.workspace_path": "/foo/bar", - "git.branch": "master", + "git.branch": "user-supplied-branch", "git.commit.author.date": "usersupplied-authordate", "git.commit.author.email": "usersupplied-authoremail", "git.commit.author.name": "usersupplied-authorname", @@ -754,8 +861,7 @@ "git.commit.committer.name": "usersupplied-comittername", "git.commit.message": "usersupplied-message", "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", - "git.repository_url": "git@github.com:DataDog/userrepo.git", - "git.tag": "0.0.2" + "git.repository_url": "git@github.com:DataDog/userrepo.git" } ], [ @@ -775,7 +881,17 @@ "CI_PROJECT_DIR": "/foo/bar", "CI_PROJECT_PATH": "gitlab-pipeline-name", "CI_PROJECT_URL": "https://gitlab.com/repo", - "CI_REPOSITORY_URL": "https://user:password@gitlab.com/DataDog/dogweb.git", + "CI_REPOSITORY_URL": "https://gitlab.com/repo/myrepo.git", + "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", + "DD_GIT_COMMIT_AUTHOR_EMAIL": "usersupplied-authoremail", + "DD_GIT_COMMIT_AUTHOR_NAME": "usersupplied-authorname", + "DD_GIT_COMMIT_COMMITTER_DATE": "usersupplied-comitterdate", + "DD_GIT_COMMIT_COMMITTER_EMAIL": "usersupplied-comitteremail", + "DD_GIT_COMMIT_COMMITTER_NAME": "usersupplied-comittername", + "DD_GIT_COMMIT_MESSAGE": "usersupplied-message", + "DD_GIT_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "DD_GIT_REPOSITORY_URL": "git@github.com:DataDog/userrepo.git", + "DD_GIT_TAG": "0.0.2", "GITLAB_CI": "gitlab" }, { @@ -790,12 +906,16 @@ "ci.stage.name": "gitlab-stage-name", "ci.workspace_path": "/foo/bar", "git.branch": "master", - "git.commit.author.date": "2021-07-21T11:43:07-04:00", - "git.commit.author.email": "john@doe.com", - "git.commit.author.name": "John Doe", - "git.commit.message": "gitlab-git-commit-message", + "git.commit.author.date": "usersupplied-authordate", + "git.commit.author.email": "usersupplied-authoremail", + "git.commit.author.name": "usersupplied-authorname", + "git.commit.committer.date": "usersupplied-comitterdate", + "git.commit.committer.email": "usersupplied-comitteremail", + "git.commit.committer.name": "usersupplied-comittername", + "git.commit.message": "usersupplied-message", "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", - "git.repository_url": "https://gitlab.com/DataDog/dogweb.git" + "git.repository_url": "git@github.com:DataDog/userrepo.git", + "git.tag": "0.0.2" } ], [ diff --git a/packages/dd-trace/test/plugins/util/ci-env/jenkins.json b/packages/dd-trace/test/plugins/util/ci-env/jenkins.json index 
3e791b5f7ff..f87cdbd2a36 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/jenkins.json +++ b/packages/dd-trace/test/plugins/util/ci-env/jenkins.json @@ -687,6 +687,153 @@ "git.repository_url": "https://github.com/DataDog/dogweb.git" } ], + [ + { + "BUILD_NUMBER": "jenkins-pipeline-number", + "BUILD_TAG": "jenkins-pipeline-id", + "BUILD_URL": "https://jenkins.com/pipeline", + "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL_1": "https://user@github.com/DataDog/dogweb.git", + "JENKINS_URL": "jenkins", + "JOB_URL": "https://jenkins.com/job" + }, + { + "_dd.ci.env_vars": "{\"DD_CUSTOM_TRACE_ID\":\"jenkins-custom-trace-id\"}", + "ci.pipeline.id": "jenkins-pipeline-id", + "ci.pipeline.number": "jenkins-pipeline-number", + "ci.pipeline.url": "https://jenkins.com/pipeline", + "ci.provider.name": "jenkins", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com/DataDog/dogweb.git" + } + ], + [ + { + "BUILD_NUMBER": "jenkins-pipeline-number", + "BUILD_TAG": "jenkins-pipeline-id", + "BUILD_URL": "https://jenkins.com/pipeline", + "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL_1": "https://user:password@github.com:1234/DataDog/dogweb.git", + "JENKINS_URL": "jenkins", + "JOB_URL": "https://jenkins.com/job" + }, + { + "_dd.ci.env_vars": "{\"DD_CUSTOM_TRACE_ID\":\"jenkins-custom-trace-id\"}", + "ci.pipeline.id": "jenkins-pipeline-id", + "ci.pipeline.number": "jenkins-pipeline-number", + "ci.pipeline.url": "https://jenkins.com/pipeline", + "ci.provider.name": "jenkins", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com:1234/DataDog/dogweb.git" + } + ], + [ + { + "BUILD_NUMBER": "jenkins-pipeline-number", + "BUILD_TAG": "jenkins-pipeline-id", + "BUILD_URL": "https://jenkins.com/pipeline", + "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL_1": "https://user:password@1.1.1.1/DataDog/dogweb.git", + "JENKINS_URL": "jenkins", + "JOB_URL": "https://jenkins.com/job" + }, + { + "_dd.ci.env_vars": "{\"DD_CUSTOM_TRACE_ID\":\"jenkins-custom-trace-id\"}", + "ci.pipeline.id": "jenkins-pipeline-id", + "ci.pipeline.number": "jenkins-pipeline-number", + "ci.pipeline.url": "https://jenkins.com/pipeline", + "ci.provider.name": "jenkins", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://1.1.1.1/DataDog/dogweb.git" + } + ], + [ + { + "BUILD_NUMBER": "jenkins-pipeline-number", + "BUILD_TAG": "jenkins-pipeline-id", + "BUILD_URL": "https://jenkins.com/pipeline", + "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL_1": "https://user:password@1.1.1.1:1234/DataDog/dogweb.git", + "JENKINS_URL": "jenkins", + "JOB_URL": "https://jenkins.com/job" + }, + { + "_dd.ci.env_vars": "{\"DD_CUSTOM_TRACE_ID\":\"jenkins-custom-trace-id\"}", + "ci.pipeline.id": "jenkins-pipeline-id", + "ci.pipeline.number": "jenkins-pipeline-number", + "ci.pipeline.url": "https://jenkins.com/pipeline", + "ci.provider.name": "jenkins", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://1.1.1.1:1234/DataDog/dogweb.git" + } + ], + [ + { + "BUILD_NUMBER": "jenkins-pipeline-number", + "BUILD_TAG": "jenkins-pipeline-id", + "BUILD_URL": "https://jenkins.com/pipeline", + 
"DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL_1": "https://user:password@1.1.1.1:1234/DataDog/dogweb_with_@_yeah.git", + "JENKINS_URL": "jenkins", + "JOB_URL": "https://jenkins.com/job" + }, + { + "_dd.ci.env_vars": "{\"DD_CUSTOM_TRACE_ID\":\"jenkins-custom-trace-id\"}", + "ci.pipeline.id": "jenkins-pipeline-id", + "ci.pipeline.number": "jenkins-pipeline-number", + "ci.pipeline.url": "https://jenkins.com/pipeline", + "ci.provider.name": "jenkins", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://1.1.1.1:1234/DataDog/dogweb_with_@_yeah.git" + } + ], + [ + { + "BUILD_NUMBER": "jenkins-pipeline-number", + "BUILD_TAG": "jenkins-pipeline-id", + "BUILD_URL": "https://jenkins.com/pipeline", + "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL_1": "ssh://user@host.xz:port/path/to/repo.git/", + "JENKINS_URL": "jenkins", + "JOB_URL": "https://jenkins.com/job" + }, + { + "_dd.ci.env_vars": "{\"DD_CUSTOM_TRACE_ID\":\"jenkins-custom-trace-id\"}", + "ci.pipeline.id": "jenkins-pipeline-id", + "ci.pipeline.number": "jenkins-pipeline-number", + "ci.pipeline.url": "https://jenkins.com/pipeline", + "ci.provider.name": "jenkins", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "ssh://host.xz:port/path/to/repo.git/" + } + ], + [ + { + "BUILD_NUMBER": "jenkins-pipeline-number", + "BUILD_TAG": "jenkins-pipeline-id", + "BUILD_URL": "https://jenkins.com/pipeline", + "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL_1": "ssh://user:password@host.xz:port/path/to/repo.git/", + "JENKINS_URL": "jenkins", + "JOB_URL": "https://jenkins.com/job" + }, + { + "_dd.ci.env_vars": "{\"DD_CUSTOM_TRACE_ID\":\"jenkins-custom-trace-id\"}", + "ci.pipeline.id": "jenkins-pipeline-id", + "ci.pipeline.number": "jenkins-pipeline-number", + "ci.pipeline.url": "https://jenkins.com/pipeline", + "ci.provider.name": "jenkins", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "ssh://host.xz:port/path/to/repo.git/" + } + ], [ { "BUILD_NUMBER": "jenkins-pipeline-number", diff --git a/packages/dd-trace/test/plugins/util/ci-env/teamcity.json b/packages/dd-trace/test/plugins/util/ci-env/teamcity.json index 086c1c16de1..037887c4ae0 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/teamcity.json +++ b/packages/dd-trace/test/plugins/util/ci-env/teamcity.json @@ -74,5 +74,117 @@ "git.repository_url": "git@github.com:DataDog/userrepo.git", "git.tag": "0.0.2" } + ], + [ + { + "BUILD_URL": "https://teamcity.com/repo", + "DD_GIT_REPOSITORY_URL": "https://user:password@github.com/DataDog/dogweb.git", + "TEAMCITY_BUILDCONF_NAME": "Test 1", + "TEAMCITY_VERSION": "2022.10 (build 116751)" + }, + { + "ci.job.name": "Test 1", + "ci.job.url": "https://teamcity.com/repo", + "ci.provider.name": "teamcity", + "git.repository_url": "https://github.com/DataDog/dogweb.git" + } + ], + [ + { + "BUILD_URL": "https://teamcity.com/repo", + "DD_GIT_REPOSITORY_URL": "https://user@github.com/DataDog/dogweb.git", + "TEAMCITY_BUILDCONF_NAME": "Test 1", + "TEAMCITY_VERSION": "2022.10 (build 116751)" + }, + { + "ci.job.name": "Test 1", + "ci.job.url": "https://teamcity.com/repo", + "ci.provider.name": "teamcity", + "git.repository_url": "https://github.com/DataDog/dogweb.git" + } + ], + [ + { + "BUILD_URL": "https://teamcity.com/repo", + 
"DD_GIT_REPOSITORY_URL": "https://user:password@github.com:1234/DataDog/dogweb.git", + "TEAMCITY_BUILDCONF_NAME": "Test 1", + "TEAMCITY_VERSION": "2022.10 (build 116751)" + }, + { + "ci.job.name": "Test 1", + "ci.job.url": "https://teamcity.com/repo", + "ci.provider.name": "teamcity", + "git.repository_url": "https://github.com:1234/DataDog/dogweb.git" + } + ], + [ + { + "BUILD_URL": "https://teamcity.com/repo", + "DD_GIT_REPOSITORY_URL": "https://user:password@1.1.1.1/DataDog/dogweb.git", + "TEAMCITY_BUILDCONF_NAME": "Test 1", + "TEAMCITY_VERSION": "2022.10 (build 116751)" + }, + { + "ci.job.name": "Test 1", + "ci.job.url": "https://teamcity.com/repo", + "ci.provider.name": "teamcity", + "git.repository_url": "https://1.1.1.1/DataDog/dogweb.git" + } + ], + [ + { + "BUILD_URL": "https://teamcity.com/repo", + "DD_GIT_REPOSITORY_URL": "https://user:password@1.1.1.1:1234/DataDog/dogweb.git", + "TEAMCITY_BUILDCONF_NAME": "Test 1", + "TEAMCITY_VERSION": "2022.10 (build 116751)" + }, + { + "ci.job.name": "Test 1", + "ci.job.url": "https://teamcity.com/repo", + "ci.provider.name": "teamcity", + "git.repository_url": "https://1.1.1.1:1234/DataDog/dogweb.git" + } + ], + [ + { + "BUILD_URL": "https://teamcity.com/repo", + "DD_GIT_REPOSITORY_URL": "https://user:password@1.1.1.1:1234/DataDog/dogweb_with_@_yeah.git", + "TEAMCITY_BUILDCONF_NAME": "Test 1", + "TEAMCITY_VERSION": "2022.10 (build 116751)" + }, + { + "ci.job.name": "Test 1", + "ci.job.url": "https://teamcity.com/repo", + "ci.provider.name": "teamcity", + "git.repository_url": "https://1.1.1.1:1234/DataDog/dogweb_with_@_yeah.git" + } + ], + [ + { + "BUILD_URL": "https://teamcity.com/repo", + "DD_GIT_REPOSITORY_URL": "ssh://user@host.xz:port/path/to/repo.git/", + "TEAMCITY_BUILDCONF_NAME": "Test 1", + "TEAMCITY_VERSION": "2022.10 (build 116751)" + }, + { + "ci.job.name": "Test 1", + "ci.job.url": "https://teamcity.com/repo", + "ci.provider.name": "teamcity", + "git.repository_url": "ssh://host.xz:port/path/to/repo.git/" + } + ], + [ + { + "BUILD_URL": "https://teamcity.com/repo", + "DD_GIT_REPOSITORY_URL": "ssh://user:password@host.xz:port/path/to/repo.git/", + "TEAMCITY_BUILDCONF_NAME": "Test 1", + "TEAMCITY_VERSION": "2022.10 (build 116751)" + }, + { + "ci.job.name": "Test 1", + "ci.job.url": "https://teamcity.com/repo", + "ci.provider.name": "teamcity", + "git.repository_url": "ssh://host.xz:port/path/to/repo.git/" + } ] ] diff --git a/packages/dd-trace/test/plugins/util/ci-env/usersupplied.json b/packages/dd-trace/test/plugins/util/ci-env/usersupplied.json index 3e770927ebf..464c4158558 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/usersupplied.json +++ b/packages/dd-trace/test/plugins/util/ci-env/usersupplied.json @@ -178,5 +178,173 @@ "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", "git.repository_url": "https://github.com/DataDog/dogweb.git" } + ], + [ + { + "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", + "DD_GIT_COMMIT_AUTHOR_EMAIL": "usersupplied-authoremail", + "DD_GIT_COMMIT_AUTHOR_NAME": "usersupplied-authorname", + "DD_GIT_COMMIT_COMMITTER_DATE": "usersupplied-comitterdate", + "DD_GIT_COMMIT_COMMITTER_EMAIL": "usersupplied-comitteremail", + "DD_GIT_COMMIT_COMMITTER_NAME": "usersupplied-comittername", + "DD_GIT_COMMIT_MESSAGE": "usersupplied-message", + "DD_GIT_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "DD_GIT_REPOSITORY_URL": "https://user@github.com/DataDog/dogweb.git" + }, + { + "git.commit.author.date": "usersupplied-authordate", + "git.commit.author.email": 
"usersupplied-authoremail", + "git.commit.author.name": "usersupplied-authorname", + "git.commit.committer.date": "usersupplied-comitterdate", + "git.commit.committer.email": "usersupplied-comitteremail", + "git.commit.committer.name": "usersupplied-comittername", + "git.commit.message": "usersupplied-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com/DataDog/dogweb.git" + } + ], + [ + { + "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", + "DD_GIT_COMMIT_AUTHOR_EMAIL": "usersupplied-authoremail", + "DD_GIT_COMMIT_AUTHOR_NAME": "usersupplied-authorname", + "DD_GIT_COMMIT_COMMITTER_DATE": "usersupplied-comitterdate", + "DD_GIT_COMMIT_COMMITTER_EMAIL": "usersupplied-comitteremail", + "DD_GIT_COMMIT_COMMITTER_NAME": "usersupplied-comittername", + "DD_GIT_COMMIT_MESSAGE": "usersupplied-message", + "DD_GIT_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "DD_GIT_REPOSITORY_URL": "https://user:password@github.com:1234/DataDog/dogweb.git" + }, + { + "git.commit.author.date": "usersupplied-authordate", + "git.commit.author.email": "usersupplied-authoremail", + "git.commit.author.name": "usersupplied-authorname", + "git.commit.committer.date": "usersupplied-comitterdate", + "git.commit.committer.email": "usersupplied-comitteremail", + "git.commit.committer.name": "usersupplied-comittername", + "git.commit.message": "usersupplied-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com:1234/DataDog/dogweb.git" + } + ], + [ + { + "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", + "DD_GIT_COMMIT_AUTHOR_EMAIL": "usersupplied-authoremail", + "DD_GIT_COMMIT_AUTHOR_NAME": "usersupplied-authorname", + "DD_GIT_COMMIT_COMMITTER_DATE": "usersupplied-comitterdate", + "DD_GIT_COMMIT_COMMITTER_EMAIL": "usersupplied-comitteremail", + "DD_GIT_COMMIT_COMMITTER_NAME": "usersupplied-comittername", + "DD_GIT_COMMIT_MESSAGE": "usersupplied-message", + "DD_GIT_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "DD_GIT_REPOSITORY_URL": "https://user:password@1.1.1.1/DataDog/dogweb.git" + }, + { + "git.commit.author.date": "usersupplied-authordate", + "git.commit.author.email": "usersupplied-authoremail", + "git.commit.author.name": "usersupplied-authorname", + "git.commit.committer.date": "usersupplied-comitterdate", + "git.commit.committer.email": "usersupplied-comitteremail", + "git.commit.committer.name": "usersupplied-comittername", + "git.commit.message": "usersupplied-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://1.1.1.1/DataDog/dogweb.git" + } + ], + [ + { + "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", + "DD_GIT_COMMIT_AUTHOR_EMAIL": "usersupplied-authoremail", + "DD_GIT_COMMIT_AUTHOR_NAME": "usersupplied-authorname", + "DD_GIT_COMMIT_COMMITTER_DATE": "usersupplied-comitterdate", + "DD_GIT_COMMIT_COMMITTER_EMAIL": "usersupplied-comitteremail", + "DD_GIT_COMMIT_COMMITTER_NAME": "usersupplied-comittername", + "DD_GIT_COMMIT_MESSAGE": "usersupplied-message", + "DD_GIT_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "DD_GIT_REPOSITORY_URL": "https://user:password@1.1.1.1:1234/DataDog/dogweb.git" + }, + { + "git.commit.author.date": "usersupplied-authordate", + "git.commit.author.email": "usersupplied-authoremail", + "git.commit.author.name": "usersupplied-authorname", + "git.commit.committer.date": "usersupplied-comitterdate", + "git.commit.committer.email": 
"usersupplied-comitteremail", + "git.commit.committer.name": "usersupplied-comittername", + "git.commit.message": "usersupplied-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://1.1.1.1:1234/DataDog/dogweb.git" + } + ], + [ + { + "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", + "DD_GIT_COMMIT_AUTHOR_EMAIL": "usersupplied-authoremail", + "DD_GIT_COMMIT_AUTHOR_NAME": "usersupplied-authorname", + "DD_GIT_COMMIT_COMMITTER_DATE": "usersupplied-comitterdate", + "DD_GIT_COMMIT_COMMITTER_EMAIL": "usersupplied-comitteremail", + "DD_GIT_COMMIT_COMMITTER_NAME": "usersupplied-comittername", + "DD_GIT_COMMIT_MESSAGE": "usersupplied-message", + "DD_GIT_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "DD_GIT_REPOSITORY_URL": "https://user:password@1.1.1.1:1234/DataDog/dogweb_with_@_yeah.git" + }, + { + "git.commit.author.date": "usersupplied-authordate", + "git.commit.author.email": "usersupplied-authoremail", + "git.commit.author.name": "usersupplied-authorname", + "git.commit.committer.date": "usersupplied-comitterdate", + "git.commit.committer.email": "usersupplied-comitteremail", + "git.commit.committer.name": "usersupplied-comittername", + "git.commit.message": "usersupplied-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://1.1.1.1:1234/DataDog/dogweb_with_@_yeah.git" + } + ], + [ + { + "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", + "DD_GIT_COMMIT_AUTHOR_EMAIL": "usersupplied-authoremail", + "DD_GIT_COMMIT_AUTHOR_NAME": "usersupplied-authorname", + "DD_GIT_COMMIT_COMMITTER_DATE": "usersupplied-comitterdate", + "DD_GIT_COMMIT_COMMITTER_EMAIL": "usersupplied-comitteremail", + "DD_GIT_COMMIT_COMMITTER_NAME": "usersupplied-comittername", + "DD_GIT_COMMIT_MESSAGE": "usersupplied-message", + "DD_GIT_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "DD_GIT_REPOSITORY_URL": "ssh://user@host.xz:port/path/to/repo.git/" + }, + { + "git.commit.author.date": "usersupplied-authordate", + "git.commit.author.email": "usersupplied-authoremail", + "git.commit.author.name": "usersupplied-authorname", + "git.commit.committer.date": "usersupplied-comitterdate", + "git.commit.committer.email": "usersupplied-comitteremail", + "git.commit.committer.name": "usersupplied-comittername", + "git.commit.message": "usersupplied-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "ssh://host.xz:port/path/to/repo.git/" + } + ], + [ + { + "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", + "DD_GIT_COMMIT_AUTHOR_EMAIL": "usersupplied-authoremail", + "DD_GIT_COMMIT_AUTHOR_NAME": "usersupplied-authorname", + "DD_GIT_COMMIT_COMMITTER_DATE": "usersupplied-comitterdate", + "DD_GIT_COMMIT_COMMITTER_EMAIL": "usersupplied-comitteremail", + "DD_GIT_COMMIT_COMMITTER_NAME": "usersupplied-comittername", + "DD_GIT_COMMIT_MESSAGE": "usersupplied-message", + "DD_GIT_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "DD_GIT_REPOSITORY_URL": "ssh://user:password@host.xz:port/path/to/repo.git/" + }, + { + "git.commit.author.date": "usersupplied-authordate", + "git.commit.author.email": "usersupplied-authoremail", + "git.commit.author.name": "usersupplied-authorname", + "git.commit.committer.date": "usersupplied-comitterdate", + "git.commit.committer.email": "usersupplied-comitteremail", + "git.commit.committer.name": "usersupplied-comittername", + "git.commit.message": "usersupplied-message", + "git.commit.sha": 
"b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "ssh://host.xz:port/path/to/repo.git/" + } ] ] diff --git a/packages/dd-trace/test/plugins/util/git.spec.js b/packages/dd-trace/test/plugins/util/git.spec.js index 83c4905968a..90553564f98 100644 --- a/packages/dd-trace/test/plugins/util/git.spec.js +++ b/packages/dd-trace/test/plugins/util/git.spec.js @@ -89,14 +89,14 @@ describe('git', () => { }) it('does not crash if git is not available', () => { sanitizedExecStub.returns('') - const ciMetadata = { repositoryUrl: 'ciRepositoryUrl' } + const ciMetadata = { repositoryUrl: 'https://github.com/datadog/safe-repository.git' } const metadata = getGitMetadata(ciMetadata) expect(metadata).to.eql({ [GIT_BRANCH]: '', [GIT_TAG]: undefined, [GIT_COMMIT_MESSAGE]: '', [GIT_COMMIT_SHA]: '', - [GIT_REPOSITORY_URL]: 'ciRepositoryUrl', + [GIT_REPOSITORY_URL]: 'https://github.com/datadog/safe-repository.git', [GIT_COMMIT_COMMITTER_EMAIL]: undefined, [GIT_COMMIT_COMMITTER_DATE]: undefined, [GIT_COMMIT_COMMITTER_NAME]: undefined, @@ -112,7 +112,7 @@ describe('git', () => { 'git author,git.author@email.com,2022-02-14T16:22:03-05:00,' + 'git committer,git.committer@email.com,2022-02-14T16:23:03-05:00' ) - .onCall(1).returns('gitRepositoryUrl') + .onCall(1).returns('https://github.com/datadog/safe-repository.git') .onCall(2).returns('this is a commit message') .onCall(3).returns('gitBranch') .onCall(4).returns('gitCommitSHA') @@ -124,7 +124,7 @@ describe('git', () => { [GIT_TAG]: 'ciTag', [GIT_COMMIT_MESSAGE]: 'this is a commit message', [GIT_COMMIT_SHA]: 'gitCommitSHA', - [GIT_REPOSITORY_URL]: 'gitRepositoryUrl', + [GIT_REPOSITORY_URL]: 'https://github.com/datadog/safe-repository.git', [GIT_COMMIT_AUTHOR_EMAIL]: 'git.author@email.com', [GIT_COMMIT_AUTHOR_DATE]: '2022-02-14T16:22:03-05:00', [GIT_COMMIT_AUTHOR_NAME]: 'git author', @@ -321,3 +321,34 @@ describe('unshallowRepository', () => { expect(sanitizedExecStub).to.have.been.calledWith('git', options) }) }) + +describe('user credentials', () => { + afterEach(() => { + sanitizedExecStub.reset() + execFileSyncStub.reset() + }) + it('scrubs https user credentials', () => { + sanitizedExecStub + .onCall(0).returns( + 'git author,git.author@email.com,2022-02-14T16:22:03-05:00,' + + 'git committer,git.committer@email.com,2022-02-14T16:23:03-05:00' + ) + .onCall(1).returns('https://x-oauth-basic:ghp_safe_characters@github.com/datadog/safe-repository.git') + + const metadata = getGitMetadata({}) + expect(metadata[GIT_REPOSITORY_URL]) + .to.equal('https://github.com/datadog/safe-repository.git') + }) + it('scrubs ssh user credentials', () => { + sanitizedExecStub + .onCall(0).returns( + 'git author,git.author@email.com,2022-02-14T16:22:03-05:00,' + + 'git committer,git.committer@email.com,2022-02-14T16:23:03-05:00' + ) + .onCall(1).returns('ssh://username@host.xz:port/path/to/repo.git/') + + const metadata = getGitMetadata({}) + expect(metadata[GIT_REPOSITORY_URL]) + .to.equal('ssh://host.xz:port/path/to/repo.git/') + }) +}) diff --git a/packages/dd-trace/test/plugins/util/url.spec.js b/packages/dd-trace/test/plugins/util/url.spec.js new file mode 100644 index 00000000000..dc1d0c2d15a --- /dev/null +++ b/packages/dd-trace/test/plugins/util/url.spec.js @@ -0,0 +1,40 @@ +'use strict' + +require('../../setup/tap') + +const { filterSensitiveInfoFromRepository } = require('../../../src/plugins/util/url') + +describe('filterSensitiveInfoFromRepository', () => { + it('returns the same url if no sensitive info is present', () => { + const urls = [ + 
'http://example.com/repository.git', + 'https://datadog.com/repository.git', + 'ssh://host.xz:port/path/to/repo.git/', + 'git@github.com:DataDog/dd-trace-js.git' + ] + urls.forEach(url => { + expect(filterSensitiveInfoFromRepository(url)).to.equal(url) + }) + }) + it('returns the scrubbed url if credentials are present', () => { + const sensitiveUrls = [ + 'https://username:password@datadog.com/repository.git', + 'ssh://username@host.xz:port/path/to/repo.git/', + 'https://username@datadog.com/repository.git' + ] + expect(filterSensitiveInfoFromRepository(sensitiveUrls[0])).to.equal('https://datadog.com/repository.git') + expect(filterSensitiveInfoFromRepository(sensitiveUrls[1])).to.equal('ssh://host.xz:port/path/to/repo.git/') + expect(filterSensitiveInfoFromRepository(sensitiveUrls[2])).to.equal('https://datadog.com/repository.git') + }) + it('does not crash for empty or invalid repository URLs', () => { + const invalidUrls = [ + null, + '', + undefined, + '1+1=2' + ] + invalidUrls.forEach(url => { + expect(filterSensitiveInfoFromRepository(url)).to.equal('') + }) + }) +}) From bfae7ad605945681b51928f1dc008c372cea9377 Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Mon, 13 Nov 2023 16:07:09 +0100 Subject: [PATCH 061/147] Eagerly release cached reference to web span tags when the span ends (#3781) --- packages/dd-trace/src/opentracing/span.js | 4 ++++ packages/dd-trace/src/profiling/profilers/wall.js | 10 ++++++++++ 2 files changed, 14 insertions(+) diff --git a/packages/dd-trace/src/opentracing/span.js b/packages/dd-trace/src/opentracing/span.js index 230d5625f3b..86e0c5d12ed 100644 --- a/packages/dd-trace/src/opentracing/span.js +++ b/packages/dd-trace/src/opentracing/span.js @@ -12,6 +12,7 @@ const runtimeMetrics = require('../runtime_metrics') const log = require('../log') const { storage } = require('../../../datadog-core') const telemetryMetrics = require('../telemetry/metrics') +const { channel } = require('dc-polyfill') const tracerMetrics = telemetryMetrics.manager.namespace('tracers') @@ -30,6 +31,8 @@ const integrationCounters = { span_finished: {} } +const finishCh = channel('dd-trace:span:finish') + function getIntegrationCounter (event, integration) { const counters = integrationCounters[event] @@ -176,6 +179,7 @@ class DatadogSpan { this._duration = finishTime - this._startTime this._spanContext._trace.finished.push(this) this._spanContext._isFinished = true + finishCh.publish(this) this._processor.process(this) } diff --git a/packages/dd-trace/src/profiling/profilers/wall.js b/packages/dd-trace/src/profiling/profilers/wall.js index 82d92d0e24c..b5c83d412c3 100644 --- a/packages/dd-trace/src/profiling/profilers/wall.js +++ b/packages/dd-trace/src/profiling/profilers/wall.js @@ -10,6 +10,7 @@ const telemetryMetrics = require('../../telemetry/metrics') const beforeCh = dc.channel('dd-trace:storage:before') const enterCh = dc.channel('dd-trace:storage:enter') +const spanFinishCh = dc.channel('dd-trace:span:finish') const profilerTelemetryMetrics = telemetryMetrics.manager.namespace('profilers') const threadName = (function () { @@ -80,6 +81,7 @@ class NativeWallProfiler { // Bind to this so the same value can be used to unsubscribe later this._enter = this._enter.bind(this) + this._spanFinished = this._spanFinished.bind(this) this._logger = options.logger this._started = false } @@ -127,6 +129,7 @@ class NativeWallProfiler { beforeCh.subscribe(this._enter) enterCh.subscribe(this._enter) + spanFinishCh.subscribe(this._spanFinished) } this._started = true @@ -200,6 +203,12 
@@ class NativeWallProfiler { } } + _spanFinished (span) { + if (span[CachedWebTags]) { + span[CachedWebTags] = undefined + } + } + _reportV8bug (maybeBug) { const tag = `v8_profiler_bug_workaround_enabled:${this._v8ProfilerBugWorkaroundEnabled}` const metric = `v8_cpu_profiler${maybeBug ? '_maybe' : ''}_stuck_event_loop` @@ -242,6 +251,7 @@ class NativeWallProfiler { if (this._withContexts) { beforeCh.unsubscribe(this._enter) enterCh.unsubscribe(this._enter) + spanFinishCh.unsubscribe(this._spanFinished) this._profilerState = undefined this._lastSpan = undefined this._lastStartedSpans = undefined From ec0a9497c9ce1c2ce01f7f549358d9263cc4668d Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Mon, 13 Nov 2023 16:17:54 +0100 Subject: [PATCH 062/147] Only consider the active span and its ancestors when looking for web tags (#3780) --- .../dd-trace/src/profiling/profilers/wall.js | 26 ++++++++++--------- 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/packages/dd-trace/src/profiling/profilers/wall.js b/packages/dd-trace/src/profiling/profilers/wall.js index b5c83d412c3..1add4159091 100644 --- a/packages/dd-trace/src/profiling/profilers/wall.js +++ b/packages/dd-trace/src/profiling/profilers/wall.js @@ -52,10 +52,6 @@ function generateLabels ({ context: { spanId, rootSpanId, webTags, endpoint }, t return labels } -function getSpanContextTags (span) { - return span.context()._tags -} - function isWebServerSpan (tags) { return tags[SPAN_TYPE] === WEB } @@ -150,8 +146,9 @@ class NativeWallProfiler { const span = getActiveSpan() if (span) { + const context = span.context() this._lastSpan = span - const startedSpans = getStartedSpans(span.context()) + const startedSpans = getStartedSpans(context) this._lastStartedSpans = startedSpans if (this._endpointCollectionEnabled) { const cachedWebTags = span[CachedWebTags] @@ -159,14 +156,19 @@ class NativeWallProfiler { let found = false // Find the first webspan starting from the end: // There might be several webspans, for example with next.js, http plugin creates a first span - // and then next.js plugin creates a child span, and this child span haves the correct endpoint information. + // and then next.js plugin creates a child span, and this child span has the correct endpoint information. 
+ let nextSpanId = context._spanId for (let i = startedSpans.length - 1; i >= 0; i--) { - const tags = getSpanContextTags(startedSpans[i]) - if (isWebServerSpan(tags)) { - this._lastWebTags = tags - span[CachedWebTags] = tags - found = true - break + const nextContext = startedSpans[i].context() + if (nextContext._spanId === nextSpanId) { + const tags = nextContext._tags + if (isWebServerSpan(tags)) { + this._lastWebTags = tags + span[CachedWebTags] = tags + found = true + break + } + nextSpanId = nextContext._parentId } } if (!found) { From c390a5ee4cb73099fd884c7e5d254b1a10febeab Mon Sep 17 00:00:00 2001 From: Bryan English Date: Mon, 13 Nov 2023 10:40:35 -0500 Subject: [PATCH 063/147] enable arm builds for single-step (#3791) --- .gitlab-ci.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 600cdd34312..0fd16dae1cb 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -25,6 +25,16 @@ package: script: - ../.gitlab/build-deb-rpm.sh +package-arm: + extends: .package-arm + rules: + - if: $JS_PACKAGE_VERSION + when: on_success + - if: '$CI_COMMIT_TAG =~ /^v.*/' + when: on_success + script: + - ../.gitlab/build-deb-rpm.sh + .release-package: stage: deploy variables: From acd2d174bd349ae40dc0241f5b3be6879236a1a9 Mon Sep 17 00:00:00 2001 From: Igor Unanua Date: Tue, 14 Nov 2023 15:11:39 +0100 Subject: [PATCH 064/147] Obfuscate secret tokens (#3786) * Obfuscate secret tokens --- .../hardcoded-secret-analyzer.spec.js | 7 +- .../resources/hardcoded-secrets-suite.json | 132 +++++++++--------- 2 files changed, 70 insertions(+), 69 deletions(-) diff --git a/packages/dd-trace/test/appsec/iast/analyzers/hardcoded-secret-analyzer.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/hardcoded-secret-analyzer.spec.js index 9a8533dfd45..28734729558 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/hardcoded-secret-analyzer.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/hardcoded-secret-analyzer.spec.js @@ -26,12 +26,13 @@ describe('Hardcoded Secret Analyzer', () => { afterEach(sinon.restore) suite.forEach((testCase) => { - testCase.samples.forEach(sample => { - it(`should match rule ${testCase.id} with value ${sample}`, () => { + testCase.samples.forEach((sample, sampleIndex) => { + // sample values are arrays containing the parts of the original token + it(`should match rule ${testCase.id} with #${sampleIndex + 1} value ${sample[0]}...`, () => { hardcodedSecretAnalyzer.analyze({ file, literals: [{ - value: sample, + value: sample.join(''), locations: [{ line, column diff --git a/packages/dd-trace/test/appsec/iast/analyzers/resources/hardcoded-secrets-suite.json b/packages/dd-trace/test/appsec/iast/analyzers/resources/hardcoded-secrets-suite.json index 385be9bf4b9..f57bf70f2e3 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/resources/hardcoded-secrets-suite.json +++ b/packages/dd-trace/test/appsec/iast/analyzers/resources/hardcoded-secrets-suite.json @@ -3,267 +3,267 @@ "suite": [ { "id": "adobe-client-secret", - "samples": ["p8e-042c420E161f7DcF56Bc23414b5Bd9C0"] + "samples": [["p8e-EDAAd0", "2489877055E91DD4B9F6c72Fa1"]] }, { "id": "age-secret-key", - "samples": ["AGE-SECRET-KEY-1QQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQ"] + "samples": [["AGE-SECRET", "-KEY-1QQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQ"]] }, { "id": "alibaba-access-key-id", - "samples": ["LTAIdmr29cel83fy2lyyeovp"] + "samples": [["LTAI3vrhsg", "gqwovgecxxq3og"]] }, { "id": "authress-service-client-access-key", - "samples": 
["sc_z44wdom1zo8rju28l.r9t1b.acc_rpml1az3z4osyg7x2vjphgt26z42dq8.7+g1dba23g6rglng38i-vz4l+xsbi/2k+t24-jx4ww2bpezxp1bkj_rhg+6uy-t7sww2yxgultr=_2+6=90v9sa=cuyescgxc9y7w/-7vw=5vsks-aw", "ext_l889tuwc8bchl45cecib53al.ipvif.acc-qtpje8wk-k75.u4s1xhn/6u9u=zumfc86/z2/y5gu6k4_=v8464ac7i0aw4gle-8jdcw9cwd6b7ew5/vr65uqb", "scauth_8z8yn4w4o3os5798yjxhaadei.i0rmr.acc_hld57ogjggplejcwq4ci.74=y9s4y+ocm=mx8fcmdqb8y6--wzl1kk3pwfjb_r0l7o-g=u5m55gn5+o69sh=z26bk/a2+bo045bk78ac9-+ueqy05rhtj", "authress_xet76brco0osua5d9bqedw.l75lm8.acc-56pcd8z589v3vfcw4v3y.fbexbmkab4fe_+vdu/_qfdtvmc0jnb3xma38mudys9/js1zlwvn28jmbwxk=1a1-ax7/h2"] + "samples": [["sc_p0gtuom", "1ln3n7kt.13ew6.acc_ejjq9z2wbdcpn6ly923zma.6h6eh2alxu/z/mfr1a3o3nv_1gd9n6iietsjhxy+b6u/sjqp7og1z1bt19razf_5m2vr8+kdq7n_7zts52b-0vyrr=xngyl+94z5"], ["ext_8t7dcj", "2mexcn9i7e8d1dda5.773t.acc-ccaz13gd7vj3clks8gnm./z81y=0zgfsl/byb2kr4acvuma1y7-4sh1hsgbf8vt4s6vgl3tk//y49eopxlb7gqadj9f+0eyjwmc259q+o47vy/nn71wwt1xc"], ["scauth_p7p", "uuv3lnhtbxv06fqourrlt2.9uc0.acc_0a7m8-l2fahi02kgc4ylhgrl8w62sw.kzm7a4k6/82kwlly6u+_4qr1wwxqnewkw5_yq_lr7a4tu//b1ad"], ["authress_v", "yr5hwjqf4r.kpz2ly.acc-5ire1y8gxxt0ka.1+mcp516l_v_hdif3-8jafw4iktz8zfh5-57tkmqhrigi24dst74s6lvn"]] }, { "id": "aws-access-token", - "samples": ["A3TMAWZUKIWR6O0OGR7B", "AKIACGDXGKON38QRPSN6", "AGPA7L8NOOMJ8AR1TTNF", "AIDAUNJX3LKHQJGAQ6MS", "AROATVD0Y20OIK7VJPCL", "AIPA7AXHEAXF7EFJREEJ", "ANPA91BE6VSIUANWG4DY", "ANVA8M4EPVQU0XVE1G3L", "ASIAPLKQBPE8T9JT1KBV"] + "samples": [["A3TMFP3VCN", "RUS4U3CM2M"], ["AKIAH4YXF2", "KPEXC6X8DO"], ["AGPANBREO7", "N1NQFNIHI8"], ["AIDABWX6AW", "SF70OX56QP"], ["AROA804V27", "K62LFC6Q48"], ["AIPA6R18NP", "89QG9DRUZ8"], ["ANPAIRDAIZ", "M9HLNDBKTZ"], ["ANVA98YIVK", "5WL0L3K0PD"], ["ASIAHM4AY1", "O8FDV5P6PN"]] }, { "id": "clojars-api-token", - "samples": ["CLOJARS_93m8jgicwpny2zbzbzyk0id7pg7h64zd41m4skb5lrwtkl5k941hsob1zosd"] + "samples": [["CLOJARS_4e", "lb8x3mghlnxqypcu7ntssngn2o09zir7ogdfrtm4tt1c3nbazzv8amv74v"]] }, { "id": "databricks-api-token", - "samples": ["dapiad55e7agdd5469e223cf3eab02042cc6"] + "samples": [["dapih3d1b8", "5a1g8bga4992c1hg4e9318a2cf"]] }, { "id": "digitalocean-access-token", - "samples": ["doo_v1_3a61580fc6e8053e6f00a533dd53de2a347ad0fa94d47ab12e36a5671ce1c7b7"] + "samples": [["doo_v1_1c0", "26765063ee70bb5cfa0b0fecc05cc6c2fa7768d06da21a0cbba3bbac253f5"]] }, { "id": "digitalocean-pat", - "samples": ["dop_v1_3ef76a17874a2506cc60646c5d4ff6ab96d5866341500b98a80af0c91cc2fb1b"] + "samples": [["dop_v1_58d", "fa4cbf2463d8e04f5a0b8821f698d4d4b138891b6a1004348727e7f12d6ef"]] }, { "id": "digitalocean-refresh-token", - "samples": ["dor_v1_ef583b09000185cbea95598892ac77c2f035f29cd73fff20b076cc2e178abeb2"] + "samples": [["dor_v1_02b", "fbbd80069ecf54b92ba631ea4faf84c710c9f8c437c6766a10c205287b330"]] }, { "id": "doppler-api-token", - "samples": ["dp.pt.5esgp01d2s5mtsv9vbq9ong5jksv6e70cdsjbixxdyn"] + "samples": [["dp.pt.6lel", "5ltfww6ng1lrf4ta4rrwp25ifilvlt63xu7qrl1"]] }, { "id": "duffel-api-token", - "samples": ["duffel_test_z-8nmbsiyv6t_y9figh0velran-np582=tilqgk7z2e", "duffel_live_kamqmtr-s1eu0qn=de65zdtqpy2b8na6a4g=bub8o_k"] + "samples": [["duffel_tes", "t_yau4wm_3boiq6t8dqmaollcz7t7w9cuv2_=qje05lwv"], ["duffel_liv", "e_ls-f06bglojn-89cso6-6019ts7myhc12enou19cbok"]] }, { "id": "dynatrace-api-token", - "samples": ["dt0c01.nyw93k1t6k4gr5az2oceyjs6.m8uc8qz3ibz90hvur8079rfrld95fvdgz0rmhq84enj4lzbgrthl29k42c14q4yw"] + "samples": [["dt0c01.yxq", "mhvm70vew2cwh90tdltw4.tt1q27qpmi4breb4e9e90vrdzcl4gbebv3xcvr8xxgp31gswsigur538by00wwjl"]] }, { "id": "easypost-api-token", - 
"samples": ["EZAKtka0inaa62wilgvc2ovo2n0pxteydnkyx61paqiggl6hxwr5f2yqci"] + "samples": [["EZAK3pyq8j", "xaqltfxlttzm74uu0azb71hbb7e0804ft9ttatj2omgg7klq"]] }, { "id": "flutterwave-public-key", - "samples": ["FLWPUBK_TEST-c22b4eg81182hhdagchd0016abe1f472-X"] + "samples": [["FLWPUBK_TE", "ST-e4358f76hf2a8g64169fa1d58322ac53-X"]] }, { "id": "frameio-api-token", - "samples": ["fio-u-3x7rov97qo13bdr=3mlza00b5bya5x50mfz6lx_xtoce6b=_4uvou_qc-6zn-9b5"] + "samples": [["fio-u-i0ah", "qyzz2wj04s=ospunes1u96k3l3lv_n105uefmzl__cfgwiv2ikv2=tkxn8n_"]] }, { "id": "gcp-api-key", - "samples": ["AIza1RvSeh9Ni9feYDm4gEH3R8NkbvUzdgbLGap"] + "samples": [["AIzagiRmKa", "zHN2TBlarh7C6DZLio_jJFWoRJLhI"]] }, { "id": "github-app-token", - "samples": ["ghu_OCfEKaeP0l5vmtP9aI2BuSaz2keLqTISTtyJ", "ghs_TIFULl9l1YQvBDbcgMWsGCqVto6DcBTV5Zh0"] + "samples": [["ghu_vAxxW1", "u41LHiUYdT0w9ayqGE7KpDFebuP52p"], ["ghs_iWPGeF", "Y6iNSLaUQ9bq5USqcBRoMzctOVNyHz"]] }, { "id": "github-fine-grained-pat", - "samples": ["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"] + "samples": [["github_pat", "_sB5usIsunOByK3zWYm5EEnUIgDXH9FZZwLOaERzWj8UVa2S6h8j6S4i5IodPMMkIQC1_gpi7Oakvu_xeMR"]] }, { "id": "github-oauth", - "samples": ["gho_RK5eFZyEy5bHC7LKmlpxsP9BmuqwkjkKNTeR"] + "samples": [["gho_0lR6rs", "IsgCJ9yOb4y0SQSQbjGKNBIQZ3Umjm"]] }, { "id": "github-pat", - "samples": ["ghp_5w9Qp0PNLroCShgHLK8T4aQJdbK2yjnYo9Y8"] + "samples": [["ghp_IpfDBs", "Qe5xeVrVeGfLJk0CLBgILAB9Q4gtXI"]] }, { "id": "gitlab-pat", - "samples": ["glpat-IKx0e0mpgRgyHEHChuhi"] + "samples": [["glpat-ujLI", "APYyTwkmmEHZwsUL"]] }, { "id": "gitlab-ptt", - "samples": ["glptt-6538261e6952d9167791839d18f35d80b9454719"] + "samples": [["glptt-dde6", "edfff934db1535930fe73dd2f41dd1a7d4af"]] }, { "id": "gitlab-rrt", - "samples": ["GR1348941uSeVnS3DcR0GedeOoqFs"] + "samples": [["GR1348941C", "VV65wWADj8hvaRYrb30"]] }, { "id": "grafana-api-key", - "samples": ["eyJrIjoilkFWbNTrH6ZBwDWV6tp4kH8gVeZBj4EaIZ1QIh4nt4E3lmbJ1bbq8hReljA7xSIN183XLFNYyBKqDF4sDPKHcYblIXoih7zbv0BmtE8vOC27WkbgoR3iHBudhSOyao10Mt4MDS99RY3ageOh6I4PDQUuGiFbzJUjLBunqOdWS4RtIU7CDq2Sulw5ZKFfNhJHDTOetl1rpi7J9klwkCXIQCzkOzNfJ7JuvJpzGcTbzLnLon6yiT4KKA0BdnQ3XVirZMYz5wpIt5TFYn6l3Z7zD8kuToVcMZDdn6wbPH2vnvaOrvmzrpA8YsnalngRaN6U1zcc0eVwgLLSiVDhBguhYFF4SWzmllVazFW1S"] + "samples": [["eyJrIjoi7p", "eYbtEnd3onRcF9VIU4jW89hU5ygaHl8IoOZsow05IZdnNrt7FCsFPkqf0GyQHDwDpJy1kndiEoOjf4GNEiUF322Dh41h8jxEna8D32WciKBkWH969kMjsqKwQvTqv6ke1J6F6DULL2gs1Rqa45iXXOV6QLwIiqqmjgtML396QAX0Jb59E3vzXMHmy9zuQLnfL4JUhePA5lHX1fwzPxEp40OWZGYLEPqEX1oPRlbkou4wFtvSNAfcH7kVpw4bFD6q4CA90ghoVdBJ6cRpVF8EtO7ZQTjfqAekyN1Dtwbqc0Gn6aYNfPG9F26qFlsh7B7V9aVskO6inRl2HTc04WAb4ef=="]] }, { "id": "grafana-cloud-api-token", - "samples": ["glc_NUOcNiW3Ql+Q6dZ8xwjKfpuL72xdd/MP4ijNKo3X8flBnWb9bJlHv+fjlDhXx4aRZuXX6LxJ4LOAEtjka0BTZEf5D/oHHgmDEUo5+DCPYcOkwee4f8893G3UPdU6jqCXlK7wEqlXxy7917IkhwnKjpM+cTpgvHsua3+mjuz1XlLEL6eUVvZzC4z899UwAEnpGNi+8iBz8yy+dHyh1VLAu5rhDComZC0S9i++XgXytm2QF1e1Ky7r5n7MhA665bZ6+gA7J0JU9ZSHDacOjzcmGIZWmxcXld5SN7Sk0jsVpnufUzJGksWG/yz1HqUXYyvggA6HI4Z/GG9fa5BznVy4GzSbHccKOV833N/U5C+sTX/40UbOphvD/3Zttf/rbPxxKGnM5s6lX"] + "samples": [["glc_ID6ub4", 
"wphFroRSiWzRDkxI4omBhbf6abaQ0gvjK/Wq8wOuuJYJtqOnLALSIaXVJmrPNAeCnPGENHLIZ3TYDWNvNzE0UcawoBgWE+gtqVhdkJaE0D4RH/RyY8Ca7qtK1VbLm0DbWltt1wKDZA/RkJppwu+I8GxAoXoOuOeA/c6hRj9nx+B7qblrsyb417fQztEeI8VKh/SIOHXMwXW+F2VNx8i7zmKD9qukffe1o2arIdckaPHelgK9X3sMaZX71PKkYztR4hVOcpFTf9hkerdcfxMoySbOfe24T4Mni2mEK8guxZsc4zcKxtEnmEelFHv9zMAnx5CoFZSjQe2Zm+VrBrHmzkAdYA+DMj6lYEr/x0FXbhWIOE/HxD7HEa0M4kPEb"]] }, { "id": "grafana-service-account-token", - "samples": ["glsa_tyZkxBNfrk31AyRmYCRQDkMstrOU6mJq_a27a7E2E"] + "samples": [["glsa_riQAg", "SW4MluKRO2OSA1RImxf8PeFLjA6_D209Be6d"]] }, { "id": "hashicorp-tf-api-token", - "samples": ["cgjiwnvcstc3tf.atlasv1.17e58vdqi_2fpmt7ycmuastd627exxc3ulxbd7lg_3bk8ji3a0r3ixxjob_g0=5x9="] + "samples": [["0slit8am7c", "u8hc.atlasv1.3bl5k2yx5=exmkjvb7a9kg21ud6y-oj-v5hbgagpd9ukqo0d1tvdigtwlwkg8=zq2gbu"]] }, { "id": "jwt", - "samples": ["eygSDWUVjIpApmLFgg5js.ey7TxRd7iF-8Cx6NgcI6T-dcv0.1wQL45DMDTA=", "ey0NOJt93t9zeJbBtD2wI.eyoLE6hd_XC6SSVl3xAEb1pHwq."] + "samples": [["eymxaQKYwN", "DrjyU5BZi.eyztNWMW-IgrhIqwlbGiIAOxi90ui.ieoLMDPoDDYrTeUgI9"], ["eynTAssTgb", "kP5rLnybcH5X.ey/LdxrTJQcB4suJzP7r4wOdYHGQ."]] }, { "id": "linear-api-key", - "samples": ["lin_api_l4ak83b2jsjf1qjae5dzn45v7com0lzihn3h8qrc"] + "samples": [["lin_api_yy", "b9aiejfn7batneic9ky9nejcqc62s4vh589p4c"]] }, { "id": "npm-access-token", - "samples": ["npm_5qxvhxa03dxiteq9pm6pzc73b7nxcbxnacsn"] + "samples": [["npm_kij9i4", "2j7tqsn3yk7081bmn4dp2yof3i7dn1"]] }, { "id": "openai-api-key", - "samples": ["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"] + "samples": [["sk-jboPY9q", "evozlPdR3rj52T3BlbkFJ3Xbh0WRnIqB5lVRSgnwf"]] }, { "id": "planetscale-api-token", - "samples": ["pscale_tkn_crpb04n.ck=r.-d-8m=-0..fn7xz36eja0ap886d__hgd3ld"] + "samples": [["pscale_tkn", "_jk5fgt.9dcezw=7u._9z_h.5u0y1qj2iohp8=r3rne.rp7"]] }, { "id": "planetscale-oauth-token", - "samples": ["pscale_oauth_be0oc6crk27h2kjg0qn=1cau6_256bxvtu2"] + "samples": [["pscale_oau", "th_zq0w.cysbk.zt8ms7n=kov.xvisl8ivp02_k413f43ox=6jwa0fdtwxeqca4yny"]] }, { "id": "planetscale-password", - "samples": ["pscale_pw_bjem8uv5og17orsga=h454jyjqetfy615duagc7j=s8l_kr.glbh__.la"] + "samples": [["pscale_pw_", "3hqt3dfn0-r81kjiv_iw75__ktl.hg-w5zgxzze2x9=n.ctutuwkgv--j"]] }, { "id": "postman-api-token", - "samples": ["PMAK-1797993c98e59a0f9c09bc3b-e4dce5c2256119617285a763e2fb63ee75"] + "samples": [["PMAK-53968", "a5c3624c04598edb18b-7754920c28f71499915a98a0e65b06e484"]] }, { "id": "prefect-api-token", - "samples": ["pnu_knaxhdfqlx5rhxqiwxodq4w50nc25prw0kqe"] + "samples": [["pnu_harwid", "sxsm6rjkjd8hbh73kog2nz1vw73v17"]] }, { "id": "private-key", - "samples": ["-----BEGINF4PDVGVUG99E46VLEP4OQ982F7WHB-NUUGBA8-6E187SG61F0JY7JHGI7Z90PRIVATE KEY----- - KEY----"] + "samples": [["-----BEGIN", "W1K3XHIP8-AH8V9-GCH65LHJKRHK2857QYF05EWGYX23O3JUZ7J5JT5LUASG-9YAGPRIVATE KEY-----KEY----"]] }, { "id": "pulumi-api-token", - "samples": ["pul-8437f190bb1e04414d15b87af38cd68dff596dbf"] + "samples": [["pul-7190e2", "afe16c4e171c05b11568859442f3b5b64e"]] }, { "id": "pypi-upload-token", - "samples": ["pypi-AgEIcHlwaS5vcmcerQnhfw1KFC_Xv-2lgxlzJTNQ7C8qpZJGGpPlZ1FIUEr3N-mwINvTx1UY"] + "samples": [["pypi-AgEIc", "HlwaS5vcmcB4VlkGTKEYcWMbZBOoZmTZoyOzGMvyrgGcDlGIOxmk5ROMJIst1IP"]] }, { "id": "readme-api-token", - "samples": ["rdme_nuq3gsy66jd0jhyyqrcwmb8pgtqx4ljl4tbjxlfta9lzsa5acwlmj1zlwawxcabua250fo"] + "samples": [["rdme_8witn", "1adpkz8i1n37dowa0smubx8sa8tjorpmt2r1sfvuj7o15200wyie59ldbvomgy3j7"]] }, { "id": "rubygems-api-token", - "samples": 
["rubygems_cf23f5e8c403da8bb754452198555cc743ca5cf3fb194073"] + "samples": [["rubygems_0", "e871386ebed3dd645df1310585e03bb700076d4db4d779c"]] }, { "id": "scalingo-api-token", - "samples": ["tk-us-TX4DXzB0cNC_KnbIL8N14v8GhrJqH2LelLaxqy3mVmRH4MY3"] + "samples": [["tk-us-ydgc", "NbZ2IiL2NzT_jxoZX-3IMgy4oK37KjaIafoyNLrONYMR"]] }, { "id": "sendgrid-api-token", - "samples": ["SG.rbsi3jp7ont7glhbo6.i0kicrd10qzut17d5e8royv9cpnw9ttu5504g7p50v3q36."] + "samples": [["SG.j0t3ooq", "hwruz_y.lzltpg.cf8hh2j=6t1=.9ohi5yorz231-lo-ufl6kxn9gg1-zya"]] }, { "id": "sendinblue-api-token", - "samples": ["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"] + "samples": [["xkeysib-a3", "b81b566200b2d089ef0b5eaea3806a564fd12001a5c8649eb5f428867cab80-o0l46hpq577cafes"]] }, { "id": "shippo-api-token", - "samples": ["shippo_live_af42cbb904f27f042607741b265c6ccdb96ff56d", "shippo_test_e50a31d2632326072aff8cd1b073ae79ca39ba8e"] + "samples": [["shippo_liv", "e_44fb5ad5113f0f3396c17aa4d1537745440db445"], ["shippo_tes", "t_b067b13f1af9b23347ee48367558f2fb1f53e6c9"]] }, { "id": "shopify-access-token", - "samples": ["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"] + "samples": [["shpat_dBF5", "0bc0c3c02b8E5B54dEBDc9A7A5BF"]] }, { "id": "shopify-custom-access-token", - "samples": ["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"] + "samples": [["shpca_dfc4", "bC9AeFe6c1D5Ce8bac29c205fAAC"]] }, { "id": "shopify-private-app-access-token", - "samples": ["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"] + "samples": [["shppa_0F65", "3386D98d09B6c71CEaa0d2C5eb8f"]] }, { "id": "shopify-shared-secret", - "samples": ["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"] + "samples": [["shpss_AC5f", "50c52C4DCDf69aC52893AF369A74"]] }, { "id": "slack-app-token", - "samples": ["xapp-2-TQRU-2-pi9b"] + "samples": [["xapp-4-J22", "2-45492-lh9a6"]] }, { "id": "slack-bot-token", - "samples": ["xoxb-98990225585-12353152514"] + "samples": [["xoxb-25169", "65025178-629147915426"]] }, { "id": "slack-config-access-token", - "samples": ["xoxe.xoxb-9-PMWS1VD0PB4OA3DK5YFEHVCZTBHO5S70C0W8QCFXX2AQ0WH8L8DMO223K68ZNY56EDMKHF42SSQPUV974JD6PULK73YTDQJ86EMWJ71FEL36LNJY7B5EV9RT2CMNQKYCIRSPINX7R72D0O8UT9WCUEN7X23HUNZ3W1Y", "xoxe.xoxp-8-PTN8WYGN3ND14QYWSDEV2QTQMEI8R90FZ0UCZWRY0J9QE2TIEAIUYJA9A7VSQI64X7CD6W75WQ4SQDLTV93R0GRPZUB1B9HSNF3G0PLSDN8HPST7VB6QOODRQWNMUF4WILELJE3GYLONHJAHBEG9D0E8BM0EB887SM8DNH"] + "samples": [["xoxe.xoxb-", "6-LF0T5F67P5IWYCH24XT8IB7TKICBNV808OGTTI7XDRCWYXIJ9HGJQYJKHHJWC4U47RYK6YJVZ6518L4XA3CFPZKJE5LL9KUJ7IYG98MK8UTU3SM6MVWI1OPIHOFAM7WOLW30WYXCQ1H4FZPWTP8NYLRS2Q8U3L8A8FIHZ"], ["xoxe.xoxp-", "2-7U2AGVCS8TR79I0AEK8G0DIE3H6ARI67GKPDTRLHNQVJ4BB0OIDOXD74LXG9ZCA0WT0Y2WQX812KR9K2GGVJFSKDMV26KMSLP5XLKM722PP6KXMMNO3TQ1VDJXLH4TMWJQ4WL8R3I3RZCN2ZIPT5S85XJTCXA08IC2X42"]] }, { "id": "slack-config-refresh-token", - "samples": ["xoxe-4-02DA0JGWGLP95TSHFK7OYEZMQ45K733RZ0M8B7KCTS5XS8X3Q5I70CTWC0U6N6YNR8JFGTGH944GIC4RHC3BU61XG05LNNZX3RGUYRW7YA75HNNA5CZLCX2UHW9PX54WFHN16K2ZP6MU95Q3N5"] + "samples": [["xoxe-0-79K", "XK6A54W8OARDKVGM1643S5IWK086SOE8TYINNSH3O1TIVZNIVNJRDF21BJ0E5ZQ95SUVN39IZAP0LAAVNZKEFX5N6QLD1C7YLUMCE152OA2C0BQBJF1GPL4IXGZ6R5QXD1P0Y7QV3RZE53W"]] }, { "id": "slack-legacy-bot-token", - "samples": ["xoxb-2635165305-qYktSa9LhLqa2jsoJuk"] + "samples": [["xoxb-93863", "72971490-tHHi79NSwtRjv5LSxZH15la1uv"]] }, { "id": "slack-legacy-token", - "samples": ["xoxs-6-8-0-E3bc3"] + "samples": [["xoxo-8-038", "06-92105-4"]] }, { "id": "slack-legacy-workspace-token", - "samples": ["xoxa-0-syVLBd4iVIadCxpG9rPMLP1FvrRs11UmaulCnXp", 
"xoxr-wPFyQFwKOTbIkg0DwggalChTNO28bNjc23fW9tO"] + "samples": [["xoxa-4-yvu", "xRbbzJ"], ["xoxr-EchWx", "U9oknZJBpb28wGNCtcGNNdCuKq8EYTFEesWbW"]] }, { "id": "slack-user-token", - "samples": ["xoxe-6679527551-6986334739-864697748900-426-jwW3zxCLz3VcIsJq-ZgF8t8UdsfK"] + "samples": [["xoxp-30723", "4221103-4883165294-800633518510-HjrcUaoMJ4dMCu7OJACJkzqwL2WL"]] }, { "id": "slack-webhook-url", - "samples": ["https://hooks.slack.com/services/a5ixwCnZkBl/sRAEeDcbQP+JDmyJpdtNgypWZYuyNgi", "https://hooks.slack.com/workflows/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"] + "samples": [["https://ho", "oks.slack.com/services/i6l82GnghZ6EvnrTIp+5HRrsyQuvtpDFuQpCmHMuSk0aI"], ["https://ho", "oks.slack.com/workflows/JTk448L1kQO4JlX4z4W0Vf+psAcPnidRkNHbL1xG54rYz"]] }, { "id": "square-access-token", - "samples": ["sq0atp-S_gaYllSJ9Oc7Ek8i_2FTZ"] + "samples": [["sq0atp--cM", "Qnx4JFfKxBtWxE2pzKy"]] }, { "id": "square-secret", - "samples": ["sq0csp-W2DDXterE69EiRm8U_G3TqVw2X6-aEZ1uEAYKYEahKA"] + "samples": [["sq0csp-yiz", "7FxTqbr_2svCmXCGzT6Ggvc-febrP17PjqqqOork"]] }, { "id": "stripe-access-token", - "samples": ["sk_test_z01pxuk87iboja60sn9zj", "sk_live_cg98txrpd4nz", "pk_test_0he1pnbtdwhz0c8s025yuokfp1mhh56", "pk_live_kc5idmjc8a"] + "samples": [["sk_test_6u", "n1199r2z0"], ["sk_live_fa", "7nys600l0qhhypdv6md6h1yymdb"], ["pk_test_b1", "3jajl5fzn"], ["pk_live_2l", "twjjbkd6teh9t5p06tox9zmx"]] }, { "id": "telegram-bot-api-token", - "samples": ["52590039:AbM1qhPt10XJ93tkWlM_54otulznq8q9Oed"] + "samples": [["42168007:A", "iC1R4BmknR2pf9W3J2702PwV21koL7t5D7"]] }, { "id": "twilio-api-key", - "samples": ["SK7dF46e2e21CcC5A2f1997a4648DAFfbC"] + "samples": [["SK8f34B8A4", "6537cDB8defA3de6D3E3f1a1"]] }, { "id": "vault-batch-token", - "samples": ["hvb.b167n7806s59tc4u60znk7n4hmrdcra57y_f2y7dfi9u4j7mtlbrz9bgob0bq5ypsab7ey4i4vhb95-9rew2puf_x431_6zmwub4hai6t5ye1-iuc2wlj7a_7dkl6cj976rp4h1r71jv8bixvjyhkod0ud8e18-q"] + "samples": [["hvb.7b57r-", "7sz2660kyeuwpzllx3epou2t1px91koj_kba11k4d9n5mc1rdwqb3bxm1farj3un1lyexkv8tmlrh9hz0f5f5ms8z1lv05umo-dbpw54auur1ge4_n2d9pto51q8--h03-ci5gjdlbel4bhtwtxmcrp-r1i0g95a"]] }, { "id": "vault-service-token", - "samples": ["hvs.g_700cgmm40xcdyxkp9yjg6qza83ozot-zyk7qaalda0szelje0mxhyadrb0fo_ypgyoqo25p8nfgg9-7wuj_c83byhpol68fnk"] + "samples": [["hvs.kodv8r", "fyp--0r_8cmrsgq_2c7mzqvtj3d9ktz40gve1xzwpgceow-7_mya0w40c82hwu0oot2lxmiepk9sorey3i-96zee"]] } ] } From 6330c90d966137964caeb4d6fb691c1fa135dc1f Mon Sep 17 00:00:00 2001 From: Igor Unanua Date: Tue, 14 Nov 2023 15:24:05 +0100 Subject: [PATCH 065/147] ASM - Update collected request headers [APPSEC-11226] (#3795) * Update collected request headers --- packages/dd-trace/src/appsec/reporter.js | 65 +++++++++---------- .../dd-trace/src/plugins/util/ip_extractor.js | 13 ++-- .../dd-trace/test/appsec/reporter.spec.js | 4 +- 3 files changed, 40 insertions(+), 42 deletions(-) diff --git a/packages/dd-trace/src/appsec/reporter.js b/packages/dd-trace/src/appsec/reporter.js index 62e81e93f86..5fbfb92cfb3 100644 --- a/packages/dd-trace/src/appsec/reporter.js +++ b/packages/dd-trace/src/appsec/reporter.js @@ -3,6 +3,7 @@ const Limiter = require('../rate_limiter') const { storage } = require('../../../datadog-core') const web = require('../plugins/util/web') +const { ipHeaderList } = require('../plugins/util/ip_extractor') const { incrementWafInitMetric, updateWafRequestsMetricTags, @@ -13,54 +14,49 @@ const { // default limiter, configurable with setRateLimit() let limiter = new Limiter(100) -// TODO: use precomputed maps instead -const 
REQUEST_HEADERS_PASSLIST = [ - 'accept', - 'accept-encoding', - 'accept-language', +const metricsQueue = new Map() + +const contentHeaderList = [ 'content-encoding', 'content-language', 'content-length', - 'content-type', - 'forwarded', - 'forwarded-for', + 'content-type' +] + +const REQUEST_HEADERS_MAP = mapHeaderAndTags([ + 'accept', + 'accept-encoding', + 'accept-language', 'host', - 'true-client-ip', 'user-agent', + 'forwarded', 'via', - 'x-client-ip', - 'x-cluster-client-ip', - 'x-forwarded', - 'x-forwarded-for', - 'x-real-ip' -] -const RESPONSE_HEADERS_PASSLIST = [ - 'content-encoding', - 'content-language', - 'content-length', - 'content-type' -] + ...ipHeaderList, + ...contentHeaderList +], 'http.request.headers.') -const metricsQueue = new Map() +const RESPONSE_HEADERS_MAP = mapHeaderAndTags(contentHeaderList, 'http.response.headers.') -function filterHeaders (headers, passlist, prefix) { +function mapHeaderAndTags (headerList, tagPrefix) { + return new Map(headerList.map(headerName => [headerName, `${tagPrefix}${formatHeaderName(headerName)}`])) +} + +function filterHeaders (headers, map) { const result = {} if (!headers) return result - for (let i = 0; i < passlist.length; ++i) { - const headerName = passlist[i] - - if (headers[headerName]) { - result[`${prefix}${formatHeaderName(headerName)}`] = '' + headers[headerName] + for (const [headerName, tagName] of map) { + const headerValue = headers[headerName] + if (headerValue) { + result[tagName] = '' + headerValue } } return result } -// TODO: this can be precomputed at start time function formatHeaderName (name) { return name .trim() @@ -86,7 +82,7 @@ function reportWafInit (wafVersion, rulesVersion, diagnosticsRules = {}) { function reportMetrics (metrics) { // TODO: metrics should be incremental, there already is an RFC to report metrics const store = storage.getStore() - const rootSpan = store && store.req && web.root(store.req) + const rootSpan = store?.req && web.root(store.req) if (!rootSpan) return if (metrics.duration) { @@ -106,13 +102,13 @@ function reportMetrics (metrics) { function reportAttack (attackData) { const store = storage.getStore() - const req = store && store.req + const req = store?.req const rootSpan = web.root(req) if (!rootSpan) return const currentTags = rootSpan.context()._tags - const newTags = filterHeaders(req.headers, REQUEST_HEADERS_PASSLIST, 'http.request.headers.') + const newTags = filterHeaders(req.headers, REQUEST_HEADERS_MAP) newTags['appsec.event'] = 'true' @@ -158,7 +154,7 @@ function finishRequest (req, res) { if (!rootSpan.context()._tags['appsec.event']) return - const newTags = filterHeaders(res.getHeaders(), RESPONSE_HEADERS_PASSLIST, 'http.response.headers.') + const newTags = filterHeaders(res.getHeaders(), RESPONSE_HEADERS_MAP) if (req.route && typeof req.route.path === 'string') { newTags['http.endpoint'] = req.route.path @@ -180,5 +176,6 @@ module.exports = { reportAttack, reportWafUpdate: incrementWafUpdatesMetric, finishRequest, - setRateLimit + setRateLimit, + mapHeaderAndTags } diff --git a/packages/dd-trace/src/plugins/util/ip_extractor.js b/packages/dd-trace/src/plugins/util/ip_extractor.js index 37c506b40f6..14d87ec64c0 100644 --- a/packages/dd-trace/src/plugins/util/ip_extractor.js +++ b/packages/dd-trace/src/plugins/util/ip_extractor.js @@ -48,8 +48,8 @@ function extractIp (config, req) { let firstPrivateIp if (headers) { - for (let i = 0; i < ipHeaderList.length; i++) { - const firstIp = findFirstIp(headers[ipHeaderList[i]]) + for (const ipHeaderName of ipHeaderList) 
{ + const firstIp = findFirstIp(headers[ipHeaderName]) if (firstIp.public) { return firstIp.public @@ -59,7 +59,7 @@ function extractIp (config, req) { } } - return firstPrivateIp || (req.socket && req.socket.remoteAddress) + return firstPrivateIp || req.socket?.remoteAddress } function findFirstIp (str) { @@ -68,8 +68,8 @@ function findFirstIp (str) { const splitted = str.split(',') - for (let i = 0; i < splitted.length; i++) { - const chunk = splitted[i].trim() + for (const part of splitted) { + const chunk = part.trim() // TODO: strip port and interface data ? @@ -90,5 +90,6 @@ function findFirstIp (str) { } module.exports = { - extractIp + extractIp, + ipHeaderList } diff --git a/packages/dd-trace/test/appsec/reporter.spec.js b/packages/dd-trace/test/appsec/reporter.spec.js index ee0303cc2c0..ba96c885d7f 100644 --- a/packages/dd-trace/test/appsec/reporter.spec.js +++ b/packages/dd-trace/test/appsec/reporter.spec.js @@ -54,12 +54,12 @@ describe('reporter', () => { 'user-agent': 42, secret: 'password', 'x-forwarded-for': '10' - }, [ + }, Reporter.mapHeaderAndTags([ 'host', 'user-agent', 'x-forwarded-for', 'x-client-ip' - ], 'prefix.') + ], 'prefix.')) expect(result).to.deep.equal({ 'prefix.host': 'localhost', From f2944d3ea01232dbfd27fb2784c4df8b54a296e6 Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Tue, 14 Nov 2023 16:06:41 +0100 Subject: [PATCH 066/147] GC events profiler (#3770) --- LICENSE-3rdparty.csv | 2 +- package.json | 2 +- packages/dd-trace/src/profiling/config.js | 21 ++- .../src/profiling/profilers/events.js | 161 ++++++++++++++++++ .../src/profiling/profilers/shared.js | 9 + .../dd-trace/src/profiling/profilers/wall.js | 16 +- 6 files changed, 198 insertions(+), 13 deletions(-) create mode 100644 packages/dd-trace/src/profiling/profilers/events.js create mode 100644 packages/dd-trace/src/profiling/profilers/shared.js diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv index 117211f0034..15404050720 100644 --- a/LICENSE-3rdparty.csv +++ b/LICENSE-3rdparty.csv @@ -28,6 +28,7 @@ require,msgpack-lite,MIT,Copyright 2015 Yusuke Kawasaki require,node-abort-controller,MIT,Copyright (c) 2019 Steve Faulkner require,opentracing,MIT,Copyright 2016 Resonance Labs Inc require,path-to-regexp,MIT,Copyright 2014 Blake Embrey +require,pprof-format,MIT,Copyright 2022 Stephen Belanger require,protobufjs,BSD-3-Clause,Copyright 2016 Daniel Wirtz require,retry,MIT,Copyright 2011 Tim Koschützki Felix Geisendörfer require,semver,ISC,Copyright Isaac Z. Schlueter and Contributors @@ -62,7 +63,6 @@ dev,mocha,MIT,Copyright 2011-2018 JS Foundation and contributors https://js.foun dev,multer,MIT,Copyright 2014 Hage Yaapa dev,nock,MIT,Copyright 2017 Pedro Teixeira and other contributors dev,nyc,ISC,Copyright 2015 Contributors -dev,pprof-format,MIT,Copyright 2022 Stephen Belanger dev,proxyquire,MIT,Copyright 2013 Thorsten Lorenz dev,rimraf,ISC,Copyright Isaac Z. 
Schlueter and Contributors dev,sinon,BSD-3-Clause,Copyright 2010-2017 Christian Johansen diff --git a/package.json b/package.json index f2248a12761..e2d436fe1c5 100644 --- a/package.json +++ b/package.json @@ -97,6 +97,7 @@ "node-abort-controller": "^3.1.1", "opentracing": ">=0.12.1", "path-to-regexp": "^0.1.2", + "pprof-format": "^2.0.7", "protobufjs": "^7.2.4", "retry": "^0.13.1", "semver": "^7.5.4" @@ -133,7 +134,6 @@ "multer": "^1.4.5-lts.1", "nock": "^11.3.3", "nyc": "^15.1.0", - "pprof-format": "^2.0.7", "proxyquire": "^1.8.0", "rimraf": "^3.0.0", "sinon": "^15.2.0", diff --git a/packages/dd-trace/src/profiling/config.js b/packages/dd-trace/src/profiling/config.js index 7860129c591..ffcf58938be 100644 --- a/packages/dd-trace/src/profiling/config.js +++ b/packages/dd-trace/src/profiling/config.js @@ -9,6 +9,7 @@ const { FileExporter } = require('./exporters/file') const { ConsoleLogger } = require('./loggers/console') const WallProfiler = require('./profilers/wall') const SpaceProfiler = require('./profilers/space') +const EventsProfiler = require('./profilers/events') const { oomExportStrategies, snapshotKinds } = require('./constants') const { tagger } = require('./tagger') const { isFalse, isTrue } = require('../util') @@ -37,6 +38,7 @@ class Config { DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE, DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT, DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES, + DD_PROFILING_EXPERIMENTAL_TIMELINE_ENABLED, DD_PROFILING_CODEHOTSPOTS_ENABLED, DD_PROFILING_ENDPOINT_COLLECTION_ENABLED, DD_PROFILING_EXPERIMENTAL_CODEHOTSPOTS_ENABLED, @@ -126,7 +128,12 @@ class Config { const profilers = options.profilers ? options.profilers - : getProfilers({ DD_PROFILING_HEAP_ENABLED, DD_PROFILING_WALLTIME_ENABLED, DD_PROFILING_PROFILERS }) + : getProfilers({ + DD_PROFILING_HEAP_ENABLED, + DD_PROFILING_WALLTIME_ENABLED, + DD_PROFILING_EXPERIMENTAL_TIMELINE_ENABLED, + DD_PROFILING_PROFILERS + }) this.codeHotspotsEnabled = isTrue(coalesce(options.codeHotspotsEnabled, DD_PROFILING_CODEHOTSPOTS_ENABLED, @@ -139,7 +146,10 @@ class Config { module.exports = { Config } -function getProfilers ({ DD_PROFILING_HEAP_ENABLED, DD_PROFILING_WALLTIME_ENABLED, DD_PROFILING_PROFILERS }) { +function getProfilers ({ + DD_PROFILING_HEAP_ENABLED, DD_PROFILING_WALLTIME_ENABLED, + DD_PROFILING_PROFILERS, DD_PROFILING_EXPERIMENTAL_TIMELINE_ENABLED +}) { // First consider "legacy" DD_PROFILING_PROFILERS env variable, defaulting to wall + space // Use a Set to avoid duplicates const profilers = new Set(coalesce(DD_PROFILING_PROFILERS, 'wall,space').split(',')) @@ -162,6 +172,11 @@ function getProfilers ({ DD_PROFILING_HEAP_ENABLED, DD_PROFILING_WALLTIME_ENABLE } } + // Events profiler is a profiler for timeline events that goes with the wall + // profiler + if (profilers.has('wall') && DD_PROFILING_EXPERIMENTAL_TIMELINE_ENABLED) { + profilers.add('events') + } return [...profilers] } @@ -223,6 +238,8 @@ function getProfiler (name, options) { return new WallProfiler(options) case 'space': return new SpaceProfiler(options) + case 'events': + return new EventsProfiler(options) default: options.logger.error(`Unknown profiler "${name}"`) } diff --git a/packages/dd-trace/src/profiling/profilers/events.js b/packages/dd-trace/src/profiling/profilers/events.js new file mode 100644 index 00000000000..508c6df8388 --- /dev/null +++ b/packages/dd-trace/src/profiling/profilers/events.js @@ -0,0 +1,161 @@ +const { performance, constants, PerformanceObserver } = require('node:perf_hooks') 
+const { END_TIMESTAMP, THREAD_NAME, threadNamePrefix } = require('./shared') +const semver = require('semver') +const { Function, Label, Line, Location, Profile, Sample, StringTable, ValueType } = require('pprof-format') +const pprof = require('@datadog/pprof/') + +// Format of perf_hooks events changed with Node 16, we need to be mindful of it. +const node16 = semver.gte(process.version, '16.0.0') + +// perf_hooks uses millis, with fractional part representing nanos. We emit nanos into the pprof file. +const MS_TO_NS = 1000000 + +// While this is an "events profiler", meaning it emits a pprof file based on events observed as +// perf_hooks events, the emitted pprof file uses the type "timeline". +const pprofValueType = 'timeline' +const pprofValueUnit = 'nanoseconds' +const threadName = `${threadNamePrefix} GC` + +/** + * This class generates pprof files with timeline events sourced from Node.js + * performance measurement APIs. + */ +class EventsProfiler { + constructor (options = {}) { + this.type = 'events' + this._flushIntervalNanos = (options.flushInterval || 60000) * 1e6 // 60 sec + this._observer = undefined + this.entries = [] + } + + start () { + function add (items) { + this.entries.push(...items.getEntries()) + } + if (!this._observer) { + this._observer = new PerformanceObserver(add.bind(this)) + } + // Currently only support GC + this._observer.observe({ type: 'gc' }) + } + + stop () { + if (this._observer) { + this._observer.disconnect() + } + } + + profile () { + const stringTable = new StringTable() + const timestampLabelKey = stringTable.dedup(END_TIMESTAMP) + const kindLabelKey = stringTable.dedup('gc type') + const reasonLabelKey = stringTable.dedup('gc reason') + const kindLabels = [] + const reasonLabels = [] + const locations = [] + const functions = [] + const locationsPerKind = [] + const flagObj = {} + + function labelFromStr (key, valStr) { + return new Label({ key, str: stringTable.dedup(valStr) }) + } + + function labelFromStrStr (keyStr, valStr) { + return labelFromStr(stringTable.dedup(keyStr), valStr) + } + + // Create labels for all GC performance flags and kinds of GC + for (const [key, value] of Object.entries(constants)) { + if (key.startsWith('NODE_PERFORMANCE_GC_FLAGS_')) { + flagObj[key.substring(26).toLowerCase()] = value + } else if (key.startsWith('NODE_PERFORMANCE_GC_')) { + // It's a constant for a kind of GC + const kind = key.substring(20).toLowerCase() + kindLabels[value] = labelFromStr(kindLabelKey, kind) + // Construct a single-frame "location" too + const fn = new Function({ id: functions.length + 1, name: stringTable.dedup(`${kind} GC`) }) + functions.push(fn) + const line = new Line({ functionId: fn.id }) + const location = new Location({ id: locations.length + 1, line: [line] }) + locations.push(location) + locationsPerKind[value] = [location.id] + } + } + + const gcEventLabel = labelFromStrStr('event', 'gc') + const threadLabel = labelFromStrStr(THREAD_NAME, threadName) + + function getReasonLabel (flags) { + if (flags === 0) { + return null + } + let reasonLabel = reasonLabels[flags] + if (!reasonLabel) { + const reasons = [] + for (const [key, value] of Object.entries(flagObj)) { + if (value & flags) { + reasons.push(key) + } + } + const reasonStr = reasons.join(',') + reasonLabel = labelFromStr(reasonLabelKey, reasonStr) + reasonLabels[flags] = reasonLabel + } + return reasonLabel + } + + let durationFrom = Number.POSITIVE_INFINITY + let durationTo = 0 + const dateOffset = BigInt(Math.round(performance.timeOrigin * MS_TO_NS)) + + 
const samples = this.entries.map((item) => { + const { startTime, duration } = item + const { kind, flags } = node16 ? item.detail : item + const endTime = startTime + duration + if (durationFrom > startTime) durationFrom = startTime + if (durationTo < endTime) durationTo = endTime + const labels = [ + gcEventLabel, + threadLabel, + new Label({ key: timestampLabelKey, num: dateOffset + BigInt(Math.round(endTime * MS_TO_NS)) }), + kindLabels[kind] + ] + const reasonLabel = getReasonLabel(flags) + if (reasonLabel) { + labels.push(reasonLabel) + } + const sample = new Sample({ + value: [Math.round(duration * MS_TO_NS)], + label: labels, + locationId: locationsPerKind[kind] + }) + return sample + }) + + this.entries = [] + + const timeValueType = new ValueType({ + type: stringTable.dedup(pprofValueType), + unit: stringTable.dedup(pprofValueUnit) + }) + + return new Profile({ + sampleType: [timeValueType], + timeNanos: dateOffset + BigInt(Math.round(durationFrom * MS_TO_NS)), + periodType: timeValueType, + period: this._flushIntervalNanos, + durationNanos: Math.max(0, Math.round((durationTo - durationFrom) * MS_TO_NS)), + sample: samples, + location: locations, + function: functions, + stringTable: stringTable + }) + } + + encode (profile) { + return pprof.encode(profile) + } +} + +module.exports = EventsProfiler diff --git a/packages/dd-trace/src/profiling/profilers/shared.js b/packages/dd-trace/src/profiling/profilers/shared.js new file mode 100644 index 00000000000..49acc6ced61 --- /dev/null +++ b/packages/dd-trace/src/profiling/profilers/shared.js @@ -0,0 +1,9 @@ +'use strict' + +const { isMainThread, threadId } = require('node:worker_threads') + +module.exports = { + END_TIMESTAMP: 'end_timestamp_ns', + THREAD_NAME: 'thread name', + threadNamePrefix: isMainThread ? 'Main' : `Worker #${threadId}` +} diff --git a/packages/dd-trace/src/profiling/profilers/wall.js b/packages/dd-trace/src/profiling/profilers/wall.js index 1add4159091..ed4d20aab51 100644 --- a/packages/dd-trace/src/profiling/profilers/wall.js +++ b/packages/dd-trace/src/profiling/profilers/wall.js @@ -7,17 +7,13 @@ const { HTTP_METHOD, HTTP_ROUTE, RESOURCE_NAME, SPAN_TYPE } = require('../../../ const { WEB } = require('../../../../../ext/types') const runtimeMetrics = require('../../runtime_metrics') const telemetryMetrics = require('../../telemetry/metrics') +const { END_TIMESTAMP, THREAD_NAME, threadNamePrefix } = require('./shared') const beforeCh = dc.channel('dd-trace:storage:before') const enterCh = dc.channel('dd-trace:storage:enter') const spanFinishCh = dc.channel('dd-trace:span:finish') const profilerTelemetryMetrics = telemetryMetrics.manager.namespace('profilers') - -const threadName = (function () { - const { isMainThread, threadId } = require('node:worker_threads') - const name = isMainThread ? 'Main' : `Worker #${threadId}` - return `${name} Event Loop` -})() +const threadName = `${threadNamePrefix} Event Loop` const CachedWebTags = Symbol('NativeWallProfiler.CachedWebTags') @@ -33,7 +29,11 @@ function getStartedSpans (context) { } function generateLabels ({ context: { spanId, rootSpanId, webTags, endpoint }, timestamp }) { - const labels = { 'thread name': threadName } + const labels = { + [THREAD_NAME]: threadName, + // Incoming timestamps are in microseconds, we emit nanos. 
+ [END_TIMESTAMP]: timestamp * 1000n + } if (spanId) { labels['span id'] = spanId } @@ -46,8 +46,6 @@ function generateLabels ({ context: { spanId, rootSpanId, webTags, endpoint }, t // fallback to endpoint computed when sample was taken labels['trace endpoint'] = endpoint } - // Incoming timestamps are in microseconds, we emit nanos. - labels['end_timestamp_ns'] = timestamp * 1000n return labels } From 326a038d87bb6cac5021bd3be6cf1773152745e9 Mon Sep 17 00:00:00 2001 From: William Conti <58711692+wconti27@users.noreply.github.com> Date: Tue, 14 Nov 2023 12:51:30 -0500 Subject: [PATCH 067/147] feat: add DSM pathway hash to Kafka spans (#3763) * add DSM pathway hash to DSM spans --- ext/tags.d.ts | 3 +- ext/tags.js | 7 +++- .../datadog-plugin-kafkajs/src/consumer.js | 14 ++++---- .../datadog-plugin-kafkajs/src/producer.js | 2 +- .../datadog-plugin-kafkajs/test/index.spec.js | 33 ++++++++++--------- .../dd-trace/src/datastreams/processor.js | 11 +++++-- packages/dd-trace/src/tracer.js | 4 +-- 7 files changed, 44 insertions(+), 30 deletions(-) diff --git a/ext/tags.d.ts b/ext/tags.d.ts index ac794793f20..0aafd03138f 100644 --- a/ext/tags.d.ts +++ b/ext/tags.d.ts @@ -17,7 +17,8 @@ declare const tags: { HTTP_REQUEST_HEADERS: 'http.request.headers' HTTP_RESPONSE_HEADERS: 'http.response.headers' HTTP_USERAGENT: 'http.useragent', - HTTP_CLIENT_IP: 'http.client_ip' + HTTP_CLIENT_IP: 'http.client_ip', + PATHWAY_HASH: 'pathway.hash' } export = tags diff --git a/ext/tags.js b/ext/tags.js index c025d86753c..e270a6bde3a 100644 --- a/ext/tags.js +++ b/ext/tags.js @@ -22,7 +22,12 @@ const tags = { HTTP_REQUEST_HEADERS: 'http.request.headers', HTTP_RESPONSE_HEADERS: 'http.response.headers', HTTP_USERAGENT: 'http.useragent', - HTTP_CLIENT_IP: 'http.client_ip' + HTTP_CLIENT_IP: 'http.client_ip', + + // Messaging + + // DSM Specific + PATHWAY_HASH: 'pathway.hash' } // Deprecated diff --git a/packages/datadog-plugin-kafkajs/src/consumer.js b/packages/datadog-plugin-kafkajs/src/consumer.js index 83102674e04..c29cb389e10 100644 --- a/packages/datadog-plugin-kafkajs/src/consumer.js +++ b/packages/datadog-plugin-kafkajs/src/consumer.js @@ -8,14 +8,8 @@ class KafkajsConsumerPlugin extends ConsumerPlugin { static get operation () { return 'consume' } start ({ topic, partition, message, groupId }) { - if (this.config.dsmEnabled) { - const payloadSize = getMessageSize(message) - this.tracer.decodeDataStreamsContext(message.headers[CONTEXT_PROPAGATION_KEY]) - this.tracer - .setCheckpoint(['direction:in', `group:${groupId}`, `topic:${topic}`, 'type:kafka'], payloadSize) - } const childOf = extract(this.tracer, message.headers) - this.startSpan({ + const span = this.startSpan({ childOf, resource: topic, type: 'worker', @@ -28,6 +22,12 @@ class KafkajsConsumerPlugin extends ConsumerPlugin { 'kafka.partition': partition } }) + if (this.config.dsmEnabled) { + const payloadSize = getMessageSize(message) + this.tracer.decodeDataStreamsContext(message.headers[CONTEXT_PROPAGATION_KEY]) + this.tracer + .setCheckpoint(['direction:in', `group:${groupId}`, `topic:${topic}`, 'type:kafka'], span, payloadSize) + } } } diff --git a/packages/datadog-plugin-kafkajs/src/producer.js b/packages/datadog-plugin-kafkajs/src/producer.js index b7ac9344931..a753021440c 100644 --- a/packages/datadog-plugin-kafkajs/src/producer.js +++ b/packages/datadog-plugin-kafkajs/src/producer.js @@ -32,7 +32,7 @@ class KafkajsProducerPlugin extends ProducerPlugin { if (this.config.dsmEnabled) { const payloadSize = getMessageSize(message) const 
dataStreamsContext = this.tracer - .setCheckpoint(['direction:out', `topic:${topic}`, 'type:kafka'], payloadSize) + .setCheckpoint(['direction:out', `topic:${topic}`, 'type:kafka'], span, payloadSize) pathwayCtx = encodePathwayContext(dataStreamsContext) message.headers[CONTEXT_PROPAGATION_KEY] = pathwayCtx } diff --git a/packages/datadog-plugin-kafkajs/test/index.spec.js b/packages/datadog-plugin-kafkajs/test/index.spec.js index db6b21cc5a3..a797f83b94d 100644 --- a/packages/datadog-plugin-kafkajs/test/index.spec.js +++ b/packages/datadog-plugin-kafkajs/test/index.spec.js @@ -10,6 +10,20 @@ const DataStreamsContext = require('../../dd-trace/src/data_streams_context') const { computePathwayHash } = require('../../dd-trace/src/datastreams/pathway') const { ENTRY_PARENT_HASH, DataStreamsProcessor } = require('../../dd-trace/src/datastreams/processor') +const testTopic = 'test-topic' +const expectedProducerHash = computePathwayHash( + 'test', + 'tester', + ['direction:out', 'topic:' + testTopic, 'type:kafka'], + ENTRY_PARENT_HASH +) +const expectedConsumerHash = computePathwayHash( + 'test', + 'tester', + ['direction:in', 'group:test-group', 'topic:' + testTopic, 'type:kafka'], + expectedProducerHash +) + describe('Plugin', () => { describe('kafkajs', function () { this.timeout(10000) // TODO: remove when new internal trace has landed @@ -17,7 +31,6 @@ describe('Plugin', () => { return agent.close({ ritmReset: false }) }) withVersions('kafkajs', 'kafkajs', (version) => { - const testTopic = 'test-topic' let kafka let tracer let Kafka @@ -41,7 +54,8 @@ describe('Plugin', () => { service: expectedSchema.send.serviceName, meta: { 'span.kind': 'producer', - 'component': 'kafkajs' + 'component': 'kafkajs', + 'pathway.hash': expectedProducerHash.readBigUInt64BE(0).toString() }, metrics: { 'kafka.batch_size': messages.length @@ -140,7 +154,8 @@ describe('Plugin', () => { service: expectedSchema.receive.serviceName, meta: { 'span.kind': 'consumer', - 'component': 'kafkajs' + 'component': 'kafkajs', + 'pathway.hash': expectedConsumerHash.readBigUInt64BE(0).toString() }, resource: testTopic, error: 0, @@ -268,18 +283,6 @@ describe('Plugin', () => { afterEach(async () => { await consumer.disconnect() }) - const expectedProducerHash = computePathwayHash( - 'test', - 'tester', - ['direction:out', 'topic:' + testTopic, 'type:kafka'], - ENTRY_PARENT_HASH - ) - const expectedConsumerHash = computePathwayHash( - 'test', - 'tester', - ['direction:in', 'group:test-group', 'topic:' + testTopic, 'type:kafka'], - expectedProducerHash - ) it('Should set a checkpoint on produce', async () => { const messages = [{ key: 'consumerDSM1', value: 'test2' }] diff --git a/packages/dd-trace/src/datastreams/processor.js b/packages/dd-trace/src/datastreams/processor.js index 93c4e9191bf..601d81441d8 100644 --- a/packages/dd-trace/src/datastreams/processor.js +++ b/packages/dd-trace/src/datastreams/processor.js @@ -8,6 +8,7 @@ const { encodePathwayContext } = require('./pathway') const { DataStreamsWriter } = require('./writer') const { computePathwayHash } = require('./pathway') const { types } = require('util') +const { PATHWAY_HASH } = require('../../../../ext/tags') const ENTRY_PARENT_HASH = Buffer.from('0000000000000000', 'hex') @@ -134,15 +135,19 @@ class DataStreamsProcessor { this.writer.flush(payload) } - recordCheckpoint (checkpoint) { + recordCheckpoint (checkpoint, span = null) { if (!this.enabled) return const bucketTime = Math.round(checkpoint.currentTimestamp - (checkpoint.currentTimestamp % this.bucketSizeNs)) 
this.buckets.forTime(bucketTime) .forCheckpoint(checkpoint) .addLatencies(checkpoint) + // set DSM pathway hash on span to enable related traces feature on DSM tab, convert from buffer to uint64 + if (span) { + span.setTag(PATHWAY_HASH, checkpoint.hash.readBigUInt64BE(0).toString()) + } } - setCheckpoint (edgeTags, ctx = null, payloadSize = 0) { + setCheckpoint (edgeTags, span, ctx = null, payloadSize = 0) { if (!this.enabled) return null const nowNs = Date.now() * 1e6 const direction = edgeTags.find(t => t.startsWith('direction:')) @@ -198,7 +203,7 @@ class DataStreamsProcessor { pathwayLatencyNs: pathwayLatencyNs, payloadSize: payloadSize } - this.recordCheckpoint(checkpoint) + this.recordCheckpoint(checkpoint, span) return dataStreamsContext } diff --git a/packages/dd-trace/src/tracer.js b/packages/dd-trace/src/tracer.js index ebdf3a68b8d..afa7da037b2 100644 --- a/packages/dd-trace/src/tracer.js +++ b/packages/dd-trace/src/tracer.js @@ -31,9 +31,9 @@ class DatadogTracer extends Tracer { // todo[piochelepiotr] These two methods are not related to the tracer, but to data streams monitoring. // They should be moved outside of the tracer in the future. - setCheckpoint (edgeTags, payloadSize = 0) { + setCheckpoint (edgeTags, span, payloadSize = 0) { const ctx = this._dataStreamsProcessor.setCheckpoint( - edgeTags, DataStreamsContext.getDataStreamsContext(), payloadSize + edgeTags, span, DataStreamsContext.getDataStreamsContext(), payloadSize ) DataStreamsContext.setDataStreamsContext(ctx) return ctx From 541262811626c03eb822291292a9cecd25a85535 Mon Sep 17 00:00:00 2001 From: Carles Capell <107924659+CarlesDD@users.noreply.github.com> Date: Wed, 15 Nov 2023 14:00:49 +0100 Subject: [PATCH 068/147] Update native-iast-taint-tracking to v1.6.4 (#3787) --- package.json | 2 +- yarn.lock | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/package.json b/package.json index e2d436fe1c5..796ab6b5c8f 100644 --- a/package.json +++ b/package.json @@ -70,7 +70,7 @@ "dependencies": { "@datadog/native-appsec": "4.0.0", "@datadog/native-iast-rewriter": "2.2.1", - "@datadog/native-iast-taint-tracking": "1.6.3", + "@datadog/native-iast-taint-tracking": "1.6.4", "@datadog/native-metrics": "^2.0.0", "@datadog/pprof": "4.0.1", "@datadog/sketches-js": "^2.1.0", diff --git a/yarn.lock b/yarn.lock index 0be504d0107..fb60fd791b5 100644 --- a/yarn.lock +++ b/yarn.lock @@ -400,10 +400,10 @@ lru-cache "^7.14.0" node-gyp-build "^4.5.0" -"@datadog/native-iast-taint-tracking@1.6.3": - version "1.6.3" - resolved "https://registry.yarnpkg.com/@datadog/native-iast-taint-tracking/-/native-iast-taint-tracking-1.6.3.tgz#cb2125f7bf18806da6f326c3a6b7210da3e05d8b" - integrity sha512-u/bBPNx0w8Bq+I+30enI99Ua2WPbVLkANGNyQNjW4tz2PHyeGI++HyzZV+fGm0YSy41FuHZq9EWP3SSDz/eSVw== +"@datadog/native-iast-taint-tracking@1.6.4": + version "1.6.4" + resolved "https://registry.yarnpkg.com/@datadog/native-iast-taint-tracking/-/native-iast-taint-tracking-1.6.4.tgz#16c21ad7c36a53420c0d3c5a3720731809cc7e98" + integrity sha512-Owxk7hQ4Dxwv4zJAoMjRga0IvE6lhvxnNc8pJCHsemCWBXchjr/9bqg05Zy5JnMbKUWn4XuZeJD6RFZpRa8bfw== dependencies: node-gyp-build "^3.9.0" From a6d48ca09f4591ddf1cfed09baf5ed788b40cb47 Mon Sep 17 00:00:00 2001 From: Thomas Hunter II Date: Wed, 15 Nov 2023 08:30:48 -0800 Subject: [PATCH 069/147] enable tracing header injection for AWS requests (#3796) - removes the code that disabled tracing headers when amazon signature headers were present - essentially reverts https://github.com/DataDog/dd-trace-js/pull/205 - was 
originally done to prevent breaking AWS signatures - apparently the presence of our headers does not break amazon signatures after all - fixes #3719 --- .../test/aws-sdk.spec.js | 22 +++ .../datadog-plugin-fetch/test/index.spec.js | 104 -------------- packages/datadog-plugin-http/src/client.js | 31 +--- .../datadog-plugin-http/test/client.spec.js | 118 ---------------- packages/datadog-plugin-http2/src/client.js | 27 +--- .../datadog-plugin-http2/test/client.spec.js | 133 ------------------ 6 files changed, 24 insertions(+), 411 deletions(-) diff --git a/packages/datadog-plugin-aws-sdk/test/aws-sdk.spec.js b/packages/datadog-plugin-aws-sdk/test/aws-sdk.spec.js index aeb5d5b81fd..6ab54711d2c 100644 --- a/packages/datadog-plugin-aws-sdk/test/aws-sdk.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/aws-sdk.spec.js @@ -114,6 +114,28 @@ describe('Plugin', () => { s3.listBuckets({}, e => e && done(e)) }) + // different versions of aws-sdk use different casings and different AWS headers + it('should include tracing headers and not cause a 403 error', (done) => { + const HttpClientPlugin = require('../../datadog-plugin-http/src/client.js') + const spy = sinon.spy(HttpClientPlugin.prototype, 'bindStart') + agent.use(traces => { + const headers = new Set( + Object.keys(spy.firstCall.firstArg.args.options.headers) + .map(x => x.toLowerCase()) + ) + spy.restore() + + expect(headers).to.include('authorization') + expect(headers).to.include('x-amz-date') + expect(headers).to.include('x-datadog-trace-id') + expect(headers).to.include('x-datadog-parent-id') + expect(headers).to.include('x-datadog-sampling-priority') + expect(headers).to.include('x-datadog-tags') + }).then(done, done) + + s3.listBuckets({}, e => e && done(e)) + }) + it('should mark error responses', (done) => { let error diff --git a/packages/datadog-plugin-fetch/test/index.spec.js b/packages/datadog-plugin-fetch/test/index.spec.js index 3ad82148b47..be6dd4d5c6d 100644 --- a/packages/datadog-plugin-fetch/test/index.spec.js +++ b/packages/datadog-plugin-fetch/test/index.spec.js @@ -232,110 +232,6 @@ describe('Plugin', () => { }) }) - it('should skip injecting if the Authorization header contains an AWS signature', done => { - const app = express() - - app.get('/', (req, res) => { - try { - expect(req.get('x-datadog-trace-id')).to.be.undefined - expect(req.get('x-datadog-parent-id')).to.be.undefined - - res.status(200).send() - - done() - } catch (e) { - done(e) - } - }) - - getPort().then(port => { - appListener = server(app, port, () => { - fetch(`http://localhost:${port}/`, { - headers: { - Authorization: 'AWS4-HMAC-SHA256 ...' 
- } - }) - }) - }) - }) - - it('should skip injecting if one of the Authorization headers contains an AWS signature', done => { - const app = express() - - app.get('/', (req, res) => { - try { - expect(req.get('x-datadog-trace-id')).to.be.undefined - expect(req.get('x-datadog-parent-id')).to.be.undefined - - res.status(200).send() - - done() - } catch (e) { - done(e) - } - }) - - getPort().then(port => { - appListener = server(app, port, () => { - fetch(`http://localhost:${port}/`, { - headers: { - Authorization: ['AWS4-HMAC-SHA256 ...'] - } - }) - }) - }) - }) - - it('should skip injecting if the X-Amz-Signature header is set', done => { - const app = express() - - app.get('/', (req, res) => { - try { - expect(req.get('x-datadog-trace-id')).to.be.undefined - expect(req.get('x-datadog-parent-id')).to.be.undefined - - res.status(200).send() - - done() - } catch (e) { - done(e) - } - }) - - getPort().then(port => { - appListener = server(app, port, () => { - fetch(`http://localhost:${port}/`, { - headers: { - 'X-Amz-Signature': 'abc123' - } - }) - }) - }) - }) - - it('should skip injecting if the X-Amz-Signature query param is set', done => { - const app = express() - - app.get('/', (req, res) => { - try { - expect(req.get('x-datadog-trace-id')).to.be.undefined - expect(req.get('x-datadog-parent-id')).to.be.undefined - - res.status(200).send() - - done() - } catch (e) { - done(e) - } - }) - - getPort().then(port => { - appListener = server(app, port, () => { - fetch(`http://localhost:${port}/?X-Amz-Signature=abc123`) - }) - }) - }) - it('should handle connection errors', done => { getPort().then(port => { let error diff --git a/packages/datadog-plugin-http/src/client.js b/packages/datadog-plugin-http/src/client.js index 8ea210a0ba9..8da5ecd4983 100644 --- a/packages/datadog-plugin-http/src/client.js +++ b/packages/datadog-plugin-http/src/client.js @@ -58,7 +58,7 @@ class HttpClientPlugin extends ClientPlugin { span._spanContext._trace.record = false } - if (!(hasAmazonSignature(options) || !this.config.propagationFilter(uri))) { + if (this.config.propagationFilter(uri)) { this.tracer.inject(span, HTTP_HEADERS, options.headers) } @@ -195,31 +195,6 @@ function getHooks (config) { return { request } } -function hasAmazonSignature (options) { - if (!options) { - return false - } - - if (options.headers) { - const headers = Object.keys(options.headers) - .reduce((prev, next) => Object.assign(prev, { - [next.toLowerCase()]: options.headers[next] - }), {}) - - if (headers['x-amz-signature']) { - return true - } - - if ([].concat(headers['authorization']).some(startsWith('AWS4-HMAC-SHA256'))) { - return true - } - } - - const search = options.search || options.path - - return search && search.toLowerCase().indexOf('x-amz-signature=') !== -1 -} - function extractSessionDetails (options) { if (typeof options === 'string') { return new URL(options).host @@ -231,8 +206,4 @@ function extractSessionDetails (options) { return { host, port } } -function startsWith (searchString) { - return value => String(value).startsWith(searchString) -} - module.exports = HttpClientPlugin diff --git a/packages/datadog-plugin-http/test/client.spec.js b/packages/datadog-plugin-http/test/client.spec.js index ca2a89cf67d..39f8cb8df38 100644 --- a/packages/datadog-plugin-http/test/client.spec.js +++ b/packages/datadog-plugin-http/test/client.spec.js @@ -462,124 +462,6 @@ describe('Plugin', () => { }) }) - it('should skip injecting if the Authorization header contains an AWS signature', done => { - const app = express() - - 
app.get('/', (req, res) => { - try { - expect(req.get('x-datadog-trace-id')).to.be.undefined - expect(req.get('x-datadog-parent-id')).to.be.undefined - - res.status(200).send() - - done() - } catch (e) { - done(e) - } - }) - - getPort().then(port => { - appListener = server(app, port, () => { - const req = http.request({ - port, - headers: { - Authorization: 'AWS4-HMAC-SHA256 ...' - } - }) - - req.end() - }) - }) - }) - - it('should skip injecting if one of the Authorization headers contains an AWS signature', done => { - const app = express() - - app.get('/', (req, res) => { - try { - expect(req.get('x-datadog-trace-id')).to.be.undefined - expect(req.get('x-datadog-parent-id')).to.be.undefined - - res.status(200).send() - - done() - } catch (e) { - done(e) - } - }) - - getPort().then(port => { - appListener = server(app, port, () => { - const req = http.request({ - port, - headers: { - Authorization: ['AWS4-HMAC-SHA256 ...'] - } - }) - - req.end() - }) - }) - }) - - it('should skip injecting if the X-Amz-Signature header is set', done => { - const app = express() - - app.get('/', (req, res) => { - try { - expect(req.get('x-datadog-trace-id')).to.be.undefined - expect(req.get('x-datadog-parent-id')).to.be.undefined - - res.status(200).send() - - done() - } catch (e) { - done(e) - } - }) - - getPort().then(port => { - appListener = server(app, port, () => { - const req = http.request({ - port, - headers: { - 'X-Amz-Signature': 'abc123' - } - }) - - req.end() - }) - }) - }) - - it('should skip injecting if the X-Amz-Signature query param is set', done => { - const app = express() - - app.get('/', (req, res) => { - try { - expect(req.get('x-datadog-trace-id')).to.be.undefined - expect(req.get('x-datadog-parent-id')).to.be.undefined - - res.status(200).send() - - done() - } catch (e) { - done(e) - } - }) - - getPort().then(port => { - appListener = server(app, port, () => { - const req = http.request({ - port, - path: '/?X-Amz-Signature=abc123' - }) - - req.end() - }) - }) - }) - it('should run the callback in the parent context', done => { const app = express() diff --git a/packages/datadog-plugin-http2/src/client.js b/packages/datadog-plugin-http2/src/client.js index 4a60ee0b4db..8dc853c450d 100644 --- a/packages/datadog-plugin-http2/src/client.js +++ b/packages/datadog-plugin-http2/src/client.js @@ -62,9 +62,7 @@ class Http2ClientPlugin extends ClientPlugin { addHeaderTags(span, headers, HTTP_REQUEST_HEADERS, this.config) - if (!hasAmazonSignature(headers, path)) { - this.tracer.inject(span, HTTP_HEADERS, headers) - } + this.tracer.inject(span, HTTP_HEADERS, headers) message.parentStore = store message.currentStore = { ...store, span } @@ -133,29 +131,6 @@ function extractSessionDetails (authority, options) { return { protocol, port, host } } -function hasAmazonSignature (headers, path) { - if (headers) { - headers = Object.keys(headers) - .reduce((prev, next) => Object.assign(prev, { - [next.toLowerCase()]: headers[next] - }), {}) - - if (headers['x-amz-signature']) { - return true - } - - if ([].concat(headers['authorization']).some(startsWith('AWS4-HMAC-SHA256'))) { - return true - } - } - - return path && path.toLowerCase().indexOf('x-amz-signature=') !== -1 -} - -function startsWith (searchString) { - return value => String(value).startsWith(searchString) -} - function getStatusValidator (config) { if (typeof config.validateStatus === 'function') { return config.validateStatus diff --git a/packages/datadog-plugin-http2/test/client.spec.js 
b/packages/datadog-plugin-http2/test/client.spec.js index 89ec4cb1ab3..c19f6905222 100644 --- a/packages/datadog-plugin-http2/test/client.spec.js +++ b/packages/datadog-plugin-http2/test/client.spec.js @@ -381,139 +381,6 @@ describe('Plugin', () => { }) }) - it('should skip injecting if the Authorization header contains an AWS signature', done => { - const app = (stream, headers) => { - try { - expect(headers['x-datadog-trace-id']).to.be.undefined - expect(headers['x-datadog-parent-id']).to.be.undefined - - stream.respond({ - ':status': 200 - }) - stream.end() - - done() - } catch (e) { - done(e) - } - } - - getPort().then(port => { - appListener = server(app, port, () => { - const headers = { - Authorization: 'AWS4-HMAC-SHA256 ...' - } - const client = http2 - .connect(`${protocol}://localhost:${port}`) - .on('error', done) - - const req = client.request(headers) - req.on('error', done) - - req.end() - }) - }) - }) - - it('should skip injecting if one of the Authorization headers contains an AWS signature', done => { - const app = (stream, headers) => { - try { - expect(headers['x-datadog-trace-id']).to.be.undefined - expect(headers['x-datadog-parent-id']).to.be.undefined - - stream.respond({ - ':status': 200 - }) - stream.end() - - done() - } catch (e) { - done(e) - } - } - - getPort().then(port => { - appListener = server(app, port, () => { - const headers = { - Authorization: ['AWS4-HMAC-SHA256 ...'] - } - const client = http2 - .connect(`${protocol}://localhost:${port}`) - .on('error', done) - - const req = client.request(headers) - req.on('error', done) - - req.end() - }) - }) - }) - - it('should skip injecting if the X-Amz-Signature header is set', done => { - const app = (stream, headers) => { - try { - expect(headers['x-datadog-trace-id']).to.be.undefined - expect(headers['x-datadog-parent-id']).to.be.undefined - - stream.respond({ - ':status': 200 - }) - stream.end() - - done() - } catch (e) { - done(e) - } - } - - getPort().then(port => { - appListener = server(app, port, () => { - const headers = { - 'X-Amz-Signature': 'abc123' - } - const client = http2 - .connect(`${protocol}://localhost:${port}`) - .on('error', done) - - const req = client.request(headers) - req.on('error', done) - - req.end() - }) - }) - }) - - it('should skip injecting if the X-Amz-Signature query param is set', done => { - const app = (stream, headers) => { - try { - expect(headers['x-datadog-trace-id']).to.be.undefined - expect(headers['x-datadog-parent-id']).to.be.undefined - - stream.respond({ - ':status': 200 - }) - stream.end() - - done() - } catch (e) { - done(e) - } - } - - getPort().then(port => { - appListener = server(app, port, () => { - const client = http2 - .connect(`${protocol}://localhost:${port}`) - .on('error', done) - - const req = client.request({ ':path': '/?X-Amz-Signature=abc123' }) - req.on('error', done) - - req.end() - }) - }) - }) - it('should run the callback in the parent context', done => { const app = (stream, headers) => { stream.respond({ From 28cd45897e9096cb4dce323acf24c75ab3e1f181 Mon Sep 17 00:00:00 2001 From: Thomas Hunter II Date: Wed, 15 Nov 2023 08:56:30 -0800 Subject: [PATCH 070/147] restify: emit on DC channels w/ async handlers (#3782) - now emits on `next` & `finish` channels when a handler / middleware is a promise - previously it would only emit on `exit` - fixes a bug where Restify routes are repeated once per async handler / middleware --- .../datadog-instrumentations/src/restify.js | 15 +++++++++- .../datadog-plugin-restify/test/index.spec.js | 29 
+++++++++++++++++++ 2 files changed, 43 insertions(+), 1 deletion(-) diff --git a/packages/datadog-instrumentations/src/restify.js b/packages/datadog-instrumentations/src/restify.js index 33e6c175638..2644f916b3f 100644 --- a/packages/datadog-instrumentations/src/restify.js +++ b/packages/datadog-instrumentations/src/restify.js @@ -50,7 +50,20 @@ function wrapFn (fn) { enterChannel.publish({ req, route }) try { - return fn.apply(this, arguments) + const result = fn.apply(this, arguments) + if (result && typeof result === 'object' && typeof result.then === 'function') { + return result.then(function () { + nextChannel.publish({ req }) + finishChannel.publish({ req }) + return arguments + }).catch(function (error) { + errorChannel.publish({ req, error }) + nextChannel.publish({ req }) + finishChannel.publish({ req }) + throw error + }) + } + return result } catch (error) { errorChannel.publish({ req, error }) nextChannel.publish({ req }) diff --git a/packages/datadog-plugin-restify/test/index.spec.js b/packages/datadog-plugin-restify/test/index.spec.js index 26dd14a2031..ea7a5f17aa7 100644 --- a/packages/datadog-plugin-restify/test/index.spec.js +++ b/packages/datadog-plugin-restify/test/index.spec.js @@ -85,6 +85,35 @@ describe('Plugin', () => { }) }) + it('should support routing with async functions and middleware', done => { + const server = restify.createServer() + + server.get( + '/user/:id', + async function middleware () {}, + async function handler (req, res) { + res.send('hello, ' + req.params.id) + } + ) + + getPort().then(port => { + agent + .use(traces => { + expect(traces[0][0]).to.have.property('resource', 'GET /user/:id') + expect(traces[0][0].meta).to.have.property('http.url', `http://localhost:${port}/user/123`) + expect(traces[0][0].meta).to.have.property('component', 'restify') + }) + .then(done) + .catch(done) + + appListener = server.listen(port, 'localhost', () => { + axios + .get(`http://localhost:${port}/user/123`) + .catch(done) + }) + }) + }) + it('should run handlers in the request scope', done => { const server = restify.createServer() const interval = setInterval(() => { From 79a70fe0f64ab4e3ac425146cb899f4bb513896d Mon Sep 17 00:00:00 2001 From: Thomas Hunter II Date: Wed, 15 Nov 2023 08:57:18 -0800 Subject: [PATCH 071/147] core: enable 128-bit ids by default for v5+ (#3656) - makes 128 bit ID generation the default - but only for v5+ of the tracer --- packages/datadog-plugin-mysql/test/index.spec.js | 15 +++++++++++++-- packages/datadog-plugin-mysql2/test/index.spec.js | 15 +++++++++++++-- packages/datadog-plugin-pg/test/index.spec.js | 15 +++++++++++++-- packages/dd-trace/src/config.js | 4 +++- packages/dd-trace/src/opentracing/span_context.js | 7 +++++-- packages/dd-trace/src/plugins/database.js | 2 +- packages/dd-trace/test/config.spec.js | 2 +- 7 files changed, 49 insertions(+), 11 deletions(-) diff --git a/packages/datadog-plugin-mysql/test/index.spec.js b/packages/datadog-plugin-mysql/test/index.spec.js index 6c29d2bf9d7..7c43a2cd65d 100644 --- a/packages/datadog-plugin-mysql/test/index.spec.js +++ b/packages/datadog-plugin-mysql/test/index.spec.js @@ -3,6 +3,7 @@ const agent = require('../../dd-trace/test/plugins/agent') const proxyquire = require('proxyquire').noPreserveCache() const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../../dd-trace/src/constants') +const { DD_MAJOR } = require('../../../version') const { expectedSchema, rawExpectedSchema } = require('./naming') @@ -454,14 +455,19 @@ describe('Plugin', () => { it('query text should 
contain traceparent', done => { let queryText = '' agent.use(traces => { - const traceId = traces[0][0].trace_id.toString(16).padStart(32, '0') + const expectedTimePrefix = DD_MAJOR >= 5 + ? Math.floor(clock.now / 1000).toString(16).padStart(8, '0').padEnd(16, '0') + : '0000000000000000' + const traceId = expectedTimePrefix + traces[0][0].trace_id.toString(16).padStart(16, '0') const spanId = traces[0][0].span_id.toString(16).padStart(16, '0') expect(queryText).to.equal( `/*dddbs='post',dde='tester',ddps='test',ddpv='8.4.0',` + `traceparent='00-${traceId}-${spanId}-00'*/ SELECT 1 + 1 AS solution`) }).then(done, done) + const clock = sinon.useFakeTimers(new Date()) connection.query('SELECT 1 + 1 AS solution', () => { + clock.restore() queryText = connection._protocol._queue[0].sql }) }) @@ -531,14 +537,19 @@ describe('Plugin', () => { it('query text should contain traceparent', done => { let queryText = '' agent.use(traces => { - const traceId = traces[0][0].trace_id.toString(16).padStart(32, '0') + const expectedTimePrefix = DD_MAJOR >= 5 + ? Math.floor(clock.now / 1000).toString(16).padStart(8, '0').padEnd(16, '0') + : '0000000000000000' + const traceId = expectedTimePrefix + traces[0][0].trace_id.toString(16).padStart(16, '0') const spanId = traces[0][0].span_id.toString(16).padStart(16, '0') expect(queryText).to.equal( `/*dddbs='post',dde='tester',ddps='test',ddpv='8.4.0',` + `traceparent='00-${traceId}-${spanId}-00'*/ SELECT 1 + 1 AS solution`) }).then(done, done) + const clock = sinon.useFakeTimers(new Date()) pool.query('SELECT 1 + 1 AS solution', () => { + clock.restore() queryText = pool._allConnections[0]._protocol._queue[0].sql }) }) diff --git a/packages/datadog-plugin-mysql2/test/index.spec.js b/packages/datadog-plugin-mysql2/test/index.spec.js index 692332497f4..caab21f35b6 100644 --- a/packages/datadog-plugin-mysql2/test/index.spec.js +++ b/packages/datadog-plugin-mysql2/test/index.spec.js @@ -3,6 +3,7 @@ const agent = require('../../dd-trace/test/plugins/agent') const proxyquire = require('proxyquire').noPreserveCache() const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../../dd-trace/src/constants') +const { DD_MAJOR } = require('../../../version') const { expectedSchema, rawExpectedSchema } = require('./naming') @@ -444,14 +445,19 @@ describe('Plugin', () => { it('query text should contain traceparent', done => { let queryText = '' agent.use(traces => { - const traceId = traces[0][0].trace_id.toString(16).padStart(32, '0') + const expectedTimePrefix = DD_MAJOR >= 5 + ? Math.floor(clock.now / 1000).toString(16).padStart(8, '0').padEnd(16, '0') + : '0000000000000000' + const traceId = expectedTimePrefix + traces[0][0].trace_id.toString(16).padStart(16, '0') const spanId = traces[0][0].span_id.toString(16).padStart(16, '0') expect(queryText).to.equal( `/*dddbs='post',dde='tester',ddps='test',ddpv='8.4.0',` + `traceparent='00-${traceId}-${spanId}-00'*/ SELECT 1 + 1 AS solution`) }).then(done, done) + const clock = sinon.useFakeTimers(new Date()) const connect = connection.query('SELECT 1 + 1 AS solution', () => { + clock.restore() queryText = connect.sql }) }) @@ -521,14 +527,19 @@ describe('Plugin', () => { it('query text should contain traceparent', done => { let queryText = '' agent.use(traces => { - const traceId = traces[0][0].trace_id.toString(16).padStart(32, '0') + const expectedTimePrefix = DD_MAJOR >= 5 + ? 
Math.floor(clock.now / 1000).toString(16).padStart(8, '0').padEnd(16, '0') + : '0000000000000000' + const traceId = expectedTimePrefix + traces[0][0].trace_id.toString(16).padStart(16, '0') const spanId = traces[0][0].span_id.toString(16).padStart(16, '0') expect(queryText).to.equal( `/*dddbs='post',dde='tester',ddps='test',ddpv='8.4.0',` + `traceparent='00-${traceId}-${spanId}-00'*/ SELECT 1 + 1 AS solution`) }).then(done, done) + const clock = sinon.useFakeTimers(new Date()) const queryPool = pool.query('SELECT 1 + 1 AS solution', () => { + clock.restore() queryText = queryPool.sql }) }) diff --git a/packages/datadog-plugin-pg/test/index.spec.js b/packages/datadog-plugin-pg/test/index.spec.js index 5fae903c58f..a1119e935bc 100644 --- a/packages/datadog-plugin-pg/test/index.spec.js +++ b/packages/datadog-plugin-pg/test/index.spec.js @@ -7,6 +7,7 @@ const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../../dd-trace/src/c const net = require('net') const { expectedSchema, rawExpectedSchema } = require('./naming') const EventEmitter = require('events') +const { DD_MAJOR } = require('../../../version') const clients = { pg: pg => pg.Client @@ -483,13 +484,18 @@ describe('Plugin', () => { it('query text should contain traceparent', done => { agent.use(traces => { - const traceId = traces[0][0].trace_id.toString(16).padStart(32, '0') + const expectedTimePrefix = DD_MAJOR >= 5 + ? Math.floor(clock.now / 1000).toString(16).padStart(8, '0').padEnd(16, '0') + : '0000000000000000' + const traceId = expectedTimePrefix + traces[0][0].trace_id.toString(16).padStart(16, '0') const spanId = traces[0][0].span_id.toString(16).padStart(16, '0') expect(seenTraceId).to.equal(traceId) expect(seenSpanId).to.equal(spanId) }).then(done, done) + const clock = sinon.useFakeTimers(new Date()) client.query('SELECT $1::text as message', ['Hello World!'], (err, result) => { + clock.restore() if (err) return done(err) expect(seenTraceParent).to.be.true client.end((err) => { @@ -562,7 +568,10 @@ describe('Plugin', () => { } agent.use(traces => { - const traceId = traces[0][0].trace_id.toString(16).padStart(32, '0') + const expectedTimePrefix = DD_MAJOR >= 5 + ? 
Math.floor(clock.now / 1000).toString(16).padStart(8, '0').padEnd(16, '0') + : '0000000000000000' + const traceId = expectedTimePrefix + traces[0][0].trace_id.toString(16).padStart(16, '0') const spanId = traces[0][0].span_id.toString(16).padStart(16, '0') expect(queryText).to.equal( @@ -570,7 +579,9 @@ describe('Plugin', () => { `traceparent='00-${traceId}-${spanId}-00'*/ SELECT $1::text as message`) }).then(done, done) + const clock = sinon.useFakeTimers(new Date()) client.query(query, ['Hello world!'], (err) => { + clock.restore() if (err) return done(err) client.end((err) => { diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index 55932e5f74e..0dc579c6758 100644 --- a/packages/dd-trace/src/config.js +++ b/packages/dd-trace/src/config.js @@ -13,6 +13,7 @@ const { GIT_REPOSITORY_URL, GIT_COMMIT_SHA } = require('./plugins/util/tags') const { getGitMetadataFromGitProperties, removeUserSensitiveInfo } = require('./git_properties') const { updateConfig } = require('./telemetry') const { getIsGCPFunction, getIsAzureFunctionConsumptionPlan } = require('./serverless') +const { DD_MAJOR } = require('../../../version') const fromEntries = Object.fromEntries || (entries => entries.reduce((obj, [k, v]) => Object.assign(obj, { [k]: v }), {})) @@ -368,10 +369,11 @@ class Config { isGCPFunction || isAzureFunctionConsumptionPlan ) + // the tracer generates 128 bit IDs by default as of v5 const DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED = coalesce( options.traceId128BitGenerationEnabled, process.env.DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED, - false + DD_MAJOR >= 5 ) const DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED = coalesce( diff --git a/packages/dd-trace/src/opentracing/span_context.js b/packages/dd-trace/src/opentracing/span_context.js index a788d8ab2cd..aaa0ae26bc0 100644 --- a/packages/dd-trace/src/opentracing/span_context.js +++ b/packages/dd-trace/src/opentracing/span_context.js @@ -2,6 +2,9 @@ const { AUTO_KEEP } = require('../../../../ext/priority') +// the lowercase, hex encoded upper 64 bits of a 128-bit trace id, if present +const TRACE_ID_128 = '_dd.p.tid' + class DatadogSpanContext { constructor (props) { props = props || {} @@ -35,8 +38,8 @@ class DatadogSpanContext { toTraceparent () { const flags = this._sampling.priority >= AUTO_KEEP ? '01' : '00' - const traceId = this._traceId.toBuffer().length <= 8 && this._trace.tags['_dd.p.tid'] - ? this._trace.tags['_dd.p.tid'] + this._traceId.toString(16).padStart(16, '0') + const traceId = this._traceId.toBuffer().length <= 8 && this._trace.tags[TRACE_ID_128] + ? 
this._trace.tags[TRACE_ID_128] + this._traceId.toString(16).padStart(16, '0') : this._traceId.toString(16).padStart(32, '0') const spanId = this._spanId.toString(16).padStart(16, '0') const version = (this._traceparent && this._traceparent.version) || '00' diff --git a/packages/dd-trace/src/plugins/database.js b/packages/dd-trace/src/plugins/database.js index 9e82923012b..a868c594db7 100644 --- a/packages/dd-trace/src/plugins/database.js +++ b/packages/dd-trace/src/plugins/database.js @@ -36,7 +36,7 @@ class DatabasePlugin extends StoragePlugin { const { encodedDddbs, encodedDde, encodedDdps, encodedDdpv } = this.serviceTags return `dddbs='${encodedDddbs}',dde='${encodedDde}',` + - `ddps='${encodedDdps}',ddpv='${encodedDdpv}'` + `ddps='${encodedDdps}',ddpv='${encodedDdpv}'` } getDbmServiceName (span, tracerService) { diff --git a/packages/dd-trace/test/config.spec.js b/packages/dd-trace/test/config.spec.js index 3016980094b..877729e0ebb 100644 --- a/packages/dd-trace/test/config.spec.js +++ b/packages/dd-trace/test/config.spec.js @@ -89,7 +89,7 @@ describe('Config', () => { expect(config).to.have.property('reportHostname', false) expect(config).to.have.property('scope', undefined) expect(config).to.have.property('logLevel', 'debug') - expect(config).to.have.property('traceId128BitGenerationEnabled', false) + expect(config).to.have.property('traceId128BitGenerationEnabled', true) expect(config).to.have.property('traceId128BitLoggingEnabled', false) expect(config).to.have.property('spanAttributeSchema', 'v0') expect(config).to.have.property('spanComputePeerService', false) From 4e8cbc6de7864b6097664e95e4461db8f7e3c275 Mon Sep 17 00:00:00 2001 From: Sam Brenner <106700075+sabrenner@users.noreply.github.com> Date: Wed, 15 Nov 2023 14:08:13 -0500 Subject: [PATCH 072/147] fix webpack build errors in next.js test (#3798) --- packages/datadog-plugin-next/test/next.config.js | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/datadog-plugin-next/test/next.config.js b/packages/datadog-plugin-next/test/next.config.js index ef21c698c39..b71d4bc998b 100644 --- a/packages/datadog-plugin-next/test/next.config.js +++ b/packages/datadog-plugin-next/test/next.config.js @@ -11,6 +11,13 @@ const config = { experimental: {} } +// Ensure webpack 5 is used by default for older versions +if (satisfies(VERSION, '<11')) { + config.future = { + webpack5: true + } +} + // In older versions of Next.js (11.0.1 and before), the webpack config doesn't support 'node' prefixes by default // So, any "node" prefixes are replaced for these older versions by this webpack plugin // Additionally, webpack was having problems with our use of 'worker_threads', so we don't resolve it @@ -24,9 +31,11 @@ if (satisfies(VERSION, '<11.1.0')) { config.resolve.preferRelative = true + // for future errors, any node:* module that produces a webpack build error should be added here config.resolve.fallback = { ...config.resolve.fallback, - worker_threads: false + worker_threads: false, + perf_hooks: false } return config From 27d884be15b331484396e4c981873d6051720e99 Mon Sep 17 00:00:00 2001 From: Stephen Belanger Date: Thu, 16 Nov 2023 03:15:07 +0800 Subject: [PATCH 073/147] Otel span name translator (#3766) * Set special span attributes * Add span name mapper --- ext/kinds.d.ts | 1 + ext/kinds.js | 3 +- packages/dd-trace/src/format.js | 7 +- packages/dd-trace/src/opentelemetry/span.js | 97 +++++++++- packages/dd-trace/src/opentelemetry/tracer.js | 19 +- packages/dd-trace/test/format.spec.js | 10 + 
.../dd-trace/test/opentelemetry/span.spec.js | 179 ++++++++++++++++++ 7 files changed, 302 insertions(+), 14 deletions(-) diff --git a/ext/kinds.d.ts b/ext/kinds.d.ts index a2050920e96..53df2e3cc12 100644 --- a/ext/kinds.d.ts +++ b/ext/kinds.d.ts @@ -3,6 +3,7 @@ declare const kinds: { CLIENT: 'client' PRODUCER: 'producer' CONSUMER: 'consumer' + INTERNAL: 'internal' } export = kinds diff --git a/ext/kinds.js b/ext/kinds.js index 187d9ee0c08..b000bcc7930 100644 --- a/ext/kinds.js +++ b/ext/kinds.js @@ -4,5 +4,6 @@ module.exports = { SERVER: 'server', CLIENT: 'client', PRODUCER: 'producer', - CONSUMER: 'consumer' + CONSUMER: 'consumer', + INTERNAL: 'internal' } diff --git a/packages/dd-trace/src/format.js b/packages/dd-trace/src/format.js index 947f5ed3048..cbe41458a83 100644 --- a/packages/dd-trace/src/format.js +++ b/packages/dd-trace/src/format.js @@ -14,7 +14,7 @@ const SPAN_SAMPLING_MECHANISM = constants.SPAN_SAMPLING_MECHANISM const SPAN_SAMPLING_RULE_RATE = constants.SPAN_SAMPLING_RULE_RATE const SPAN_SAMPLING_MAX_PER_SECOND = constants.SPAN_SAMPLING_MAX_PER_SECOND const SAMPLING_MECHANISM_SPAN = constants.SAMPLING_MECHANISM_SPAN -const { MEASURED, BASE_SERVICE } = tags +const { MEASURED, BASE_SERVICE, ANALYTICS } = tags const ORIGIN_KEY = constants.ORIGIN_KEY const HOSTNAME_KEY = constants.HOSTNAME_KEY const TOP_LEVEL_KEY = constants.TOP_LEVEL_KEY @@ -24,6 +24,7 @@ const ERROR_STACK = constants.ERROR_STACK const ERROR_TYPE = constants.ERROR_TYPE const map = { + 'operation.name': 'name', 'service.name': 'service', 'span.type': 'type', 'resource.name': 'resource' @@ -83,6 +84,7 @@ function extractTags (trace, span) { for (const tag in tags) { switch (tag) { + case 'operation.name': case 'service.name': case 'span.type': case 'resource.name': @@ -92,6 +94,9 @@ function extractTags (trace, span) { case 'http.status_code': addTag(trace.meta, {}, tag, tags[tag] && String(tags[tag])) break + case 'analytics.event': + addTag({}, trace.metrics, ANALYTICS, tags[tag] === undefined || tags[tag] ? 1 : 0) + break case HOSTNAME_KEY: case MEASURED: addTag({}, trace.metrics, tag, tags[tag] === undefined || tags[tag] ? 1 : 0) diff --git a/packages/dd-trace/src/opentelemetry/span.js b/packages/dd-trace/src/opentelemetry/span.js index 8a8beea4169..2ff7a37c577 100644 --- a/packages/dd-trace/src/opentelemetry/span.js +++ b/packages/dd-trace/src/opentelemetry/span.js @@ -11,6 +11,7 @@ const tracer = require('../../') const DatadogSpan = require('../opentracing/span') const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../constants') const { SERVICE_NAME, RESOURCE_NAME } = require('../../../../ext/tags') +const kinds = require('../../../../ext/kinds') const SpanContext = require('./span_context') @@ -19,6 +20,93 @@ function hrTimeToMilliseconds (time) { return time[0] * 1e3 + time[1] / 1e6 } +const spanKindNames = { + [api.SpanKind.INTERNAL]: kinds.INTERNAL, + [api.SpanKind.SERVER]: kinds.SERVER, + [api.SpanKind.CLIENT]: kinds.CLIENT, + [api.SpanKind.PRODUCER]: kinds.PRODUCER, + [api.SpanKind.CONSUMER]: kinds.CONSUMER +} + +/** + * Several of these attributes are not yet supported by the Node.js OTel API. + * We check for old equivalents where we can, but not all had equivalents. 
+ */ +function spanNameMapper (spanName, kind, attributes) { + if (spanName) return spanName + + const opName = attributes['operation.name'] + if (opName) return opName + + const { INTERNAL, SERVER, CLIENT } = api.SpanKind + + // HTTP server and client requests + // TODO: Drop http.method when http.request.method is supported. + for (const key of ['http.method', 'http.request.method']) { + if (key in attributes) { + if (kind === SERVER) { + return 'http.server.request' + } + if (kind === CLIENT) { + return 'http.client.request' + } + } + } + + // Databases + const dbSystem = attributes['db.system'] + if (dbSystem && kind === CLIENT) { + return `${dbSystem}.query` + } + + // Messaging + const msgSys = attributes['messaging.system'] + const msgOp = attributes['messaging.operation'] + if (msgSys && msgOp && kind !== INTERNAL) { + return `${msgSys}.${msgOp}` + } + + // RPC (and AWS) + const rpcSystem = attributes['rpc.system'] + if (rpcSystem) { + if (kind === CLIENT) { + return rpcSystem === 'aws-api' + ? `aws.${attributes['rpc.service'] || 'client'}.request` + : `${rpcSystem}.client.request` + } + if (kind === SERVER) { + return `${rpcSystem}.server.request` + } + } + + // FaaS + const faasProvider = attributes['faas.invoked_provider'] + const faasName = attributes['faas.invoked_name'] + const faasTrigger = attributes['faas.trigger'] + if (kind === CLIENT && faasProvider && faasName) { + return `${faasProvider}.${faasName}.invoke` + } + if (kind === SERVER && faasTrigger) { + return `${faasTrigger}.invoke` + } + + // GraphQL + // NOTE: Not part of Semantic Convention spec yet, but is used in the GraphQL + // integration. + const isGraphQL = 'graphql.operation.type' in attributes + if (isGraphQL) return 'graphql.server.request' + + // Network + // TODO: Doesn't exist yet. No equivalent. + const protocol = attributes['network.protocol.name'] + const protocolPrefix = protocol ? `${protocol}.` : '' + if (kind === SERVER) return `${protocolPrefix}server.request` + if (kind === CLIENT) return `${protocolPrefix}client.request` + + // If all else fails, default to stringified span.kind. + return spanKindNames[kind] +} + class Span { constructor ( parentTracer, @@ -27,7 +115,8 @@ class Span { spanContext, kind, links = [], - timeInput + timeInput, + attributes ) { const { _tracer } = tracer @@ -35,7 +124,7 @@ class Span { const startTime = hrTimeToMilliseconds(hrStartTime) this._ddSpan = new DatadogSpan(_tracer, _tracer._processor, _tracer._prioritySampler, { - operationName: spanName, + operationName: spanNameMapper(spanName, kind, attributes), context: spanContext._ddContext, startTime, hostname: _tracer._hostname, @@ -46,6 +135,10 @@ class Span { } }, _tracer._debug) + if (attributes) { + this.setAttributes(attributes) + } + this._parentTracer = parentTracer this._context = context diff --git a/packages/dd-trace/src/opentelemetry/tracer.js b/packages/dd-trace/src/opentelemetry/tracer.js index e238623f6ba..d3422300ef2 100644 --- a/packages/dd-trace/src/opentelemetry/tracer.js +++ b/packages/dd-trace/src/opentelemetry/tracer.js @@ -78,23 +78,22 @@ class Tracer { // return api.trace.wrapSpanContext(spanContext) // } - const span = new Span( + return new Span( this, context, name, spanContext, spanKind, links, - options.startTime + options.startTime, + + // Set initial span attributes. The attributes object may have been mutated + // by the sampler, so we sanitize the merged attributes before setting them. 
+ sanitizeAttributes( + // Object.assign(attributes, samplingResult.attributes) + attributes + ) ) - // Set initial span attributes. The attributes object may have been mutated - // by the sampler, so we sanitize the merged attributes before setting them. - const initAttributes = sanitizeAttributes( - // Object.assign(attributes, samplingResult.attributes) - attributes - ) - span.setAttributes(initAttributes) - return span } startActiveSpan (name, options, context, fn) { diff --git a/packages/dd-trace/test/format.spec.js b/packages/dd-trace/test/format.spec.js index aca01012344..d6f218156b4 100644 --- a/packages/dd-trace/test/format.spec.js +++ b/packages/dd-trace/test/format.spec.js @@ -116,12 +116,14 @@ describe('format', () => { }) it('should extract Datadog specific tags', () => { + spanContext._tags['operation.name'] = 'name' spanContext._tags['service.name'] = 'service' spanContext._tags['span.type'] = 'type' spanContext._tags['resource.name'] = 'resource' trace = format(span) + expect(trace.name).to.equal('name') expect(trace.service).to.equal('service') expect(trace.type).to.equal('type') expect(trace.resource).to.equal('resource') @@ -450,5 +452,13 @@ describe('format', () => { format(span) }) + + it('should capture analytics.event', () => { + spanContext._tags['analytics.event'] = 1 + + trace = format(span) + + expect(trace.metrics).to.have.property('_dd1.sr.eausr', 1) + }) }) }) diff --git a/packages/dd-trace/test/opentelemetry/span.spec.js b/packages/dd-trace/test/opentelemetry/span.spec.js index f5633672848..48dd3f6076f 100644 --- a/packages/dd-trace/test/opentelemetry/span.spec.js +++ b/packages/dd-trace/test/opentelemetry/span.spec.js @@ -6,12 +6,22 @@ const { expect } = require('chai') const tracer = require('../../').init() +const api = require('@opentelemetry/api') const TracerProvider = require('../../src/opentelemetry/tracer_provider') const SpanContext = require('../../src/opentelemetry/span_context') const { NoopSpanProcessor } = require('../../src/opentelemetry/span_processor') const { ERROR_MESSAGE, ERROR_STACK, ERROR_TYPE } = require('../../src/constants') const { SERVICE_NAME, RESOURCE_NAME } = require('../../../../ext/tags') +const kinds = require('../../../../ext/kinds') + +const spanKindNames = { + [api.SpanKind.INTERNAL]: kinds.INTERNAL, + [api.SpanKind.SERVER]: kinds.SERVER, + [api.SpanKind.CLIENT]: kinds.CLIENT, + [api.SpanKind.PRODUCER]: kinds.PRODUCER, + [api.SpanKind.CONSUMER]: kinds.CONSUMER +} function makeSpan (...args) { const tracerProvider = new TracerProvider() @@ -43,6 +53,175 @@ describe('OTel Span', () => { expect(span.name).to.equal('name') }) + describe('span name default mapping', () => { + // Explicitly named operation + it('should map span name from operation.name', () => { + const span = makeSpan(undefined, { + attributes: { + 'operation.name': 'test' + } + }) + + expect(span.name).to.equal('test') + }) + + // HTTP server and client requests + for (const key of ['http.method', 'http.request.method']) { + for (const kind of [api.SpanKind.CLIENT, api.SpanKind.SERVER]) { + const kindName = spanKindNames[kind] + it(`should map span name from ${kindName} kind with ${key}`, () => { + const span = makeSpan(undefined, { kind, attributes: { [key]: 'GET' } }) + expect(span.name).to.equal(`http.${kindName}.request`) + }) + } + } + + // Database operations + it('should map span name from db.system if client kind', () => { + const span = makeSpan(undefined, { + kind: api.SpanKind.CLIENT, + attributes: { + 'db.system': 'mysql' + } + }) + + 
expect(span.name).to.equal('mysql.query') + }) + + // Messaging systems + for (const kind of [ + api.SpanKind.CLIENT, + api.SpanKind.SERVER, + api.SpanKind.PRODUCER, + api.SpanKind.CONSUMER + ]) { + const kindName = spanKindNames[kind] + it(`should map span name from messaging.system and messaging.operation when ${kindName} kind`, () => { + const attributes = { + 'messaging.system': kindName, + 'messaging.operation': 'send' + } + const span = makeSpan(undefined, { kind, attributes }) + expect(span.name).to.equal(`${kindName}.send`) + }) + } + + // AWS client request + it('should map span name from rpc.system of aws-api if client kind', () => { + const span = makeSpan(undefined, { + kind: api.SpanKind.CLIENT, + attributes: { + 'rpc.system': 'aws-api' + } + }) + + expect(span.name).to.equal('aws.client.request') + }) + + it('should map span name from rpc.system of aws-api with rpc.service if client kind', () => { + const span = makeSpan(undefined, { + kind: api.SpanKind.CLIENT, + attributes: { + 'rpc.system': 'aws-api', + 'rpc.service': 's3' + } + }) + + expect(span.name).to.equal('aws.s3.request') + }) + + // RPC client and server requests + for (const kind of [api.SpanKind.CLIENT, api.SpanKind.SERVER]) { + const kindName = spanKindNames[kind] + it(`should map span name from other rpc.system if ${kindName} kind`, () => { + const span = makeSpan(undefined, { + kind, + attributes: { + 'rpc.system': 'system' + } + }) + + expect(span.name).to.equal(`system.${kindName}.request`) + }) + } + + // FaaS invocations + it('should map span name from faas.invoked_provider and faas.invoked_name if client kind', () => { + const span = makeSpan(undefined, { + kind: api.SpanKind.CLIENT, + attributes: { + 'faas.invoked_provider': 'provider', + 'faas.invoked_name': 'name' + } + }) + + expect(span.name).to.equal('provider.name.invoke') + }) + + it('should map span name from faas.trigger if server kind', () => { + const span = makeSpan(undefined, { + kind: api.SpanKind.SERVER, + attributes: { + 'faas.trigger': 'trigger' + } + }) + + expect(span.name).to.equal('trigger.invoke') + }) + + // GraphQL + it('should map span name from graphql.operation.type', () => { + const span = makeSpan(undefined, { + attributes: { + 'graphql.operation.type': 'query' + } + }) + + expect(span.name).to.equal('graphql.server.request') + }) + + // Network + for (const kind of [api.SpanKind.CLIENT, api.SpanKind.SERVER]) { + const kindName = spanKindNames[kind] + + it(`should map span name when ${kindName} kind with network.protocol.name`, () => { + const span = makeSpan(undefined, { + kind: kind, + attributes: { + 'network.protocol.name': 'protocol' + } + }) + + expect(span.name).to.equal(`protocol.${kindName}.request`) + }) + + it(`should map span name when ${kindName} kind without network.protocol.name`, () => { + const span = makeSpan(undefined, { + kind: kind + }) + + expect(span.name).to.equal(`${kindName}.request`) + }) + } + + // Default to span.kind + for (const kind of [ + api.SpanKind.INTERNAL, + api.SpanKind.PRODUCER, + api.SpanKind.CONSUMER + ]) { + const kindName = spanKindNames[kind] + it(`should map span name with ${kindName} kind`, () => { + const span = makeSpan(undefined, { kind }) + expect(span.name).to.equal(kindName) + }) + } + it(`should map span name with default span kind of internal`, () => { + const span = makeSpan() + expect(span.name).to.equal('internal') + }) + }) + it('should copy span name to resource.name', () => { const span = makeSpan('name') From f0c49dbd34655e3a4251675a0aafa8d64c8dd293 Mon Sep 17 
00:00:00 2001 From: Thomas Hunter II Date: Wed, 15 Nov 2023 12:50:23 -0800 Subject: [PATCH 074/147] make 128bit ids default even in v4.x release line (#3800) - sort of a partial revert for #3656 - makes 128 bit ID generation the default for v4.x release line, not just upcoming v5.x - this change isn't a major breaking change after all --- packages/datadog-plugin-mysql/test/index.spec.js | 9 ++------- packages/datadog-plugin-mysql2/test/index.spec.js | 9 ++------- packages/datadog-plugin-pg/test/index.spec.js | 9 ++------- packages/dd-trace/src/config.js | 3 +-- 4 files changed, 7 insertions(+), 23 deletions(-) diff --git a/packages/datadog-plugin-mysql/test/index.spec.js b/packages/datadog-plugin-mysql/test/index.spec.js index 7c43a2cd65d..12b4e45457a 100644 --- a/packages/datadog-plugin-mysql/test/index.spec.js +++ b/packages/datadog-plugin-mysql/test/index.spec.js @@ -3,7 +3,6 @@ const agent = require('../../dd-trace/test/plugins/agent') const proxyquire = require('proxyquire').noPreserveCache() const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../../dd-trace/src/constants') -const { DD_MAJOR } = require('../../../version') const { expectedSchema, rawExpectedSchema } = require('./naming') @@ -455,9 +454,7 @@ describe('Plugin', () => { it('query text should contain traceparent', done => { let queryText = '' agent.use(traces => { - const expectedTimePrefix = DD_MAJOR >= 5 - ? Math.floor(clock.now / 1000).toString(16).padStart(8, '0').padEnd(16, '0') - : '0000000000000000' + const expectedTimePrefix = Math.floor(clock.now / 1000).toString(16).padStart(8, '0').padEnd(16, '0') const traceId = expectedTimePrefix + traces[0][0].trace_id.toString(16).padStart(16, '0') const spanId = traces[0][0].span_id.toString(16).padStart(16, '0') @@ -537,9 +534,7 @@ describe('Plugin', () => { it('query text should contain traceparent', done => { let queryText = '' agent.use(traces => { - const expectedTimePrefix = DD_MAJOR >= 5 - ? Math.floor(clock.now / 1000).toString(16).padStart(8, '0').padEnd(16, '0') - : '0000000000000000' + const expectedTimePrefix = Math.floor(clock.now / 1000).toString(16).padStart(8, '0').padEnd(16, '0') const traceId = expectedTimePrefix + traces[0][0].trace_id.toString(16).padStart(16, '0') const spanId = traces[0][0].span_id.toString(16).padStart(16, '0') diff --git a/packages/datadog-plugin-mysql2/test/index.spec.js b/packages/datadog-plugin-mysql2/test/index.spec.js index caab21f35b6..38d6c487eaa 100644 --- a/packages/datadog-plugin-mysql2/test/index.spec.js +++ b/packages/datadog-plugin-mysql2/test/index.spec.js @@ -3,7 +3,6 @@ const agent = require('../../dd-trace/test/plugins/agent') const proxyquire = require('proxyquire').noPreserveCache() const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../../dd-trace/src/constants') -const { DD_MAJOR } = require('../../../version') const { expectedSchema, rawExpectedSchema } = require('./naming') @@ -445,9 +444,7 @@ describe('Plugin', () => { it('query text should contain traceparent', done => { let queryText = '' agent.use(traces => { - const expectedTimePrefix = DD_MAJOR >= 5 - ? 
Math.floor(clock.now / 1000).toString(16).padStart(8, '0').padEnd(16, '0') - : '0000000000000000' + const expectedTimePrefix = Math.floor(clock.now / 1000).toString(16).padStart(8, '0').padEnd(16, '0') const traceId = expectedTimePrefix + traces[0][0].trace_id.toString(16).padStart(16, '0') const spanId = traces[0][0].span_id.toString(16).padStart(16, '0') @@ -527,9 +524,7 @@ describe('Plugin', () => { it('query text should contain traceparent', done => { let queryText = '' agent.use(traces => { - const expectedTimePrefix = DD_MAJOR >= 5 - ? Math.floor(clock.now / 1000).toString(16).padStart(8, '0').padEnd(16, '0') - : '0000000000000000' + const expectedTimePrefix = Math.floor(clock.now / 1000).toString(16).padStart(8, '0').padEnd(16, '0') const traceId = expectedTimePrefix + traces[0][0].trace_id.toString(16).padStart(16, '0') const spanId = traces[0][0].span_id.toString(16).padStart(16, '0') diff --git a/packages/datadog-plugin-pg/test/index.spec.js b/packages/datadog-plugin-pg/test/index.spec.js index a1119e935bc..ae3f0dcdcc8 100644 --- a/packages/datadog-plugin-pg/test/index.spec.js +++ b/packages/datadog-plugin-pg/test/index.spec.js @@ -7,7 +7,6 @@ const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../../dd-trace/src/c const net = require('net') const { expectedSchema, rawExpectedSchema } = require('./naming') const EventEmitter = require('events') -const { DD_MAJOR } = require('../../../version') const clients = { pg: pg => pg.Client @@ -484,9 +483,7 @@ describe('Plugin', () => { it('query text should contain traceparent', done => { agent.use(traces => { - const expectedTimePrefix = DD_MAJOR >= 5 - ? Math.floor(clock.now / 1000).toString(16).padStart(8, '0').padEnd(16, '0') - : '0000000000000000' + const expectedTimePrefix = Math.floor(clock.now / 1000).toString(16).padStart(8, '0').padEnd(16, '0') const traceId = expectedTimePrefix + traces[0][0].trace_id.toString(16).padStart(16, '0') const spanId = traces[0][0].span_id.toString(16).padStart(16, '0') expect(seenTraceId).to.equal(traceId) @@ -568,9 +565,7 @@ describe('Plugin', () => { } agent.use(traces => { - const expectedTimePrefix = DD_MAJOR >= 5 - ? 
Math.floor(clock.now / 1000).toString(16).padStart(8, '0').padEnd(16, '0') - : '0000000000000000' + const expectedTimePrefix = Math.floor(clock.now / 1000).toString(16).padStart(8, '0').padEnd(16, '0') const traceId = expectedTimePrefix + traces[0][0].trace_id.toString(16).padStart(16, '0') const spanId = traces[0][0].span_id.toString(16).padStart(16, '0') diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index 0dc579c6758..b1800e21543 100644 --- a/packages/dd-trace/src/config.js +++ b/packages/dd-trace/src/config.js @@ -13,7 +13,6 @@ const { GIT_REPOSITORY_URL, GIT_COMMIT_SHA } = require('./plugins/util/tags') const { getGitMetadataFromGitProperties, removeUserSensitiveInfo } = require('./git_properties') const { updateConfig } = require('./telemetry') const { getIsGCPFunction, getIsAzureFunctionConsumptionPlan } = require('./serverless') -const { DD_MAJOR } = require('../../../version') const fromEntries = Object.fromEntries || (entries => entries.reduce((obj, [k, v]) => Object.assign(obj, { [k]: v }), {})) @@ -373,7 +372,7 @@ class Config { const DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED = coalesce( options.traceId128BitGenerationEnabled, process.env.DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED, - DD_MAJOR >= 5 + true ) const DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED = coalesce( From 097bf23f7d1ba6170cfd8dfec0fa2d9814132910 Mon Sep 17 00:00:00 2001 From: Thomas Hunter II Date: Wed, 15 Nov 2023 12:50:59 -0800 Subject: [PATCH 075/147] Revert "enable tracing header injection for AWS requests (#3796)" (#3799) - reverts PR #3796 - reverts commit 14c1eb0ba7bb1648881affb1f1aa520f94bc5fbc - @rochdev and @astuyve are both confident this is a breaking change --- .../test/aws-sdk.spec.js | 22 --- .../datadog-plugin-fetch/test/index.spec.js | 104 ++++++++++++++ packages/datadog-plugin-http/src/client.js | 31 +++- .../datadog-plugin-http/test/client.spec.js | 118 ++++++++++++++++ packages/datadog-plugin-http2/src/client.js | 27 +++- .../datadog-plugin-http2/test/client.spec.js | 133 ++++++++++++++++++ 6 files changed, 411 insertions(+), 24 deletions(-) diff --git a/packages/datadog-plugin-aws-sdk/test/aws-sdk.spec.js b/packages/datadog-plugin-aws-sdk/test/aws-sdk.spec.js index 6ab54711d2c..aeb5d5b81fd 100644 --- a/packages/datadog-plugin-aws-sdk/test/aws-sdk.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/aws-sdk.spec.js @@ -114,28 +114,6 @@ describe('Plugin', () => { s3.listBuckets({}, e => e && done(e)) }) - // different versions of aws-sdk use different casings and different AWS headers - it('should include tracing headers and not cause a 403 error', (done) => { - const HttpClientPlugin = require('../../datadog-plugin-http/src/client.js') - const spy = sinon.spy(HttpClientPlugin.prototype, 'bindStart') - agent.use(traces => { - const headers = new Set( - Object.keys(spy.firstCall.firstArg.args.options.headers) - .map(x => x.toLowerCase()) - ) - spy.restore() - - expect(headers).to.include('authorization') - expect(headers).to.include('x-amz-date') - expect(headers).to.include('x-datadog-trace-id') - expect(headers).to.include('x-datadog-parent-id') - expect(headers).to.include('x-datadog-sampling-priority') - expect(headers).to.include('x-datadog-tags') - }).then(done, done) - - s3.listBuckets({}, e => e && done(e)) - }) - it('should mark error responses', (done) => { let error diff --git a/packages/datadog-plugin-fetch/test/index.spec.js b/packages/datadog-plugin-fetch/test/index.spec.js index be6dd4d5c6d..3ad82148b47 100644 --- 
a/packages/datadog-plugin-fetch/test/index.spec.js +++ b/packages/datadog-plugin-fetch/test/index.spec.js @@ -232,6 +232,110 @@ describe('Plugin', () => { }) }) + it('should skip injecting if the Authorization header contains an AWS signature', done => { + const app = express() + + app.get('/', (req, res) => { + try { + expect(req.get('x-datadog-trace-id')).to.be.undefined + expect(req.get('x-datadog-parent-id')).to.be.undefined + + res.status(200).send() + + done() + } catch (e) { + done(e) + } + }) + + getPort().then(port => { + appListener = server(app, port, () => { + fetch(`http://localhost:${port}/`, { + headers: { + Authorization: 'AWS4-HMAC-SHA256 ...' + } + }) + }) + }) + }) + + it('should skip injecting if one of the Authorization headers contains an AWS signature', done => { + const app = express() + + app.get('/', (req, res) => { + try { + expect(req.get('x-datadog-trace-id')).to.be.undefined + expect(req.get('x-datadog-parent-id')).to.be.undefined + + res.status(200).send() + + done() + } catch (e) { + done(e) + } + }) + + getPort().then(port => { + appListener = server(app, port, () => { + fetch(`http://localhost:${port}/`, { + headers: { + Authorization: ['AWS4-HMAC-SHA256 ...'] + } + }) + }) + }) + }) + + it('should skip injecting if the X-Amz-Signature header is set', done => { + const app = express() + + app.get('/', (req, res) => { + try { + expect(req.get('x-datadog-trace-id')).to.be.undefined + expect(req.get('x-datadog-parent-id')).to.be.undefined + + res.status(200).send() + + done() + } catch (e) { + done(e) + } + }) + + getPort().then(port => { + appListener = server(app, port, () => { + fetch(`http://localhost:${port}/`, { + headers: { + 'X-Amz-Signature': 'abc123' + } + }) + }) + }) + }) + + it('should skip injecting if the X-Amz-Signature query param is set', done => { + const app = express() + + app.get('/', (req, res) => { + try { + expect(req.get('x-datadog-trace-id')).to.be.undefined + expect(req.get('x-datadog-parent-id')).to.be.undefined + + res.status(200).send() + + done() + } catch (e) { + done(e) + } + }) + + getPort().then(port => { + appListener = server(app, port, () => { + fetch(`http://localhost:${port}/?X-Amz-Signature=abc123`) + }) + }) + }) + it('should handle connection errors', done => { getPort().then(port => { let error diff --git a/packages/datadog-plugin-http/src/client.js b/packages/datadog-plugin-http/src/client.js index 8da5ecd4983..8ea210a0ba9 100644 --- a/packages/datadog-plugin-http/src/client.js +++ b/packages/datadog-plugin-http/src/client.js @@ -58,7 +58,7 @@ class HttpClientPlugin extends ClientPlugin { span._spanContext._trace.record = false } - if (this.config.propagationFilter(uri)) { + if (!(hasAmazonSignature(options) || !this.config.propagationFilter(uri))) { this.tracer.inject(span, HTTP_HEADERS, options.headers) } @@ -195,6 +195,31 @@ function getHooks (config) { return { request } } +function hasAmazonSignature (options) { + if (!options) { + return false + } + + if (options.headers) { + const headers = Object.keys(options.headers) + .reduce((prev, next) => Object.assign(prev, { + [next.toLowerCase()]: options.headers[next] + }), {}) + + if (headers['x-amz-signature']) { + return true + } + + if ([].concat(headers['authorization']).some(startsWith('AWS4-HMAC-SHA256'))) { + return true + } + } + + const search = options.search || options.path + + return search && search.toLowerCase().indexOf('x-amz-signature=') !== -1 +} + function extractSessionDetails (options) { if (typeof options === 'string') { return new 
URL(options).host @@ -206,4 +231,8 @@ function extractSessionDetails (options) { return { host, port } } +function startsWith (searchString) { + return value => String(value).startsWith(searchString) +} + module.exports = HttpClientPlugin diff --git a/packages/datadog-plugin-http/test/client.spec.js b/packages/datadog-plugin-http/test/client.spec.js index 39f8cb8df38..ca2a89cf67d 100644 --- a/packages/datadog-plugin-http/test/client.spec.js +++ b/packages/datadog-plugin-http/test/client.spec.js @@ -462,6 +462,124 @@ describe('Plugin', () => { }) }) + it('should skip injecting if the Authorization header contains an AWS signature', done => { + const app = express() + + app.get('/', (req, res) => { + try { + expect(req.get('x-datadog-trace-id')).to.be.undefined + expect(req.get('x-datadog-parent-id')).to.be.undefined + + res.status(200).send() + + done() + } catch (e) { + done(e) + } + }) + + getPort().then(port => { + appListener = server(app, port, () => { + const req = http.request({ + port, + headers: { + Authorization: 'AWS4-HMAC-SHA256 ...' + } + }) + + req.end() + }) + }) + }) + + it('should skip injecting if one of the Authorization headers contains an AWS signature', done => { + const app = express() + + app.get('/', (req, res) => { + try { + expect(req.get('x-datadog-trace-id')).to.be.undefined + expect(req.get('x-datadog-parent-id')).to.be.undefined + + res.status(200).send() + + done() + } catch (e) { + done(e) + } + }) + + getPort().then(port => { + appListener = server(app, port, () => { + const req = http.request({ + port, + headers: { + Authorization: ['AWS4-HMAC-SHA256 ...'] + } + }) + + req.end() + }) + }) + }) + + it('should skip injecting if the X-Amz-Signature header is set', done => { + const app = express() + + app.get('/', (req, res) => { + try { + expect(req.get('x-datadog-trace-id')).to.be.undefined + expect(req.get('x-datadog-parent-id')).to.be.undefined + + res.status(200).send() + + done() + } catch (e) { + done(e) + } + }) + + getPort().then(port => { + appListener = server(app, port, () => { + const req = http.request({ + port, + headers: { + 'X-Amz-Signature': 'abc123' + } + }) + + req.end() + }) + }) + }) + + it('should skip injecting if the X-Amz-Signature query param is set', done => { + const app = express() + + app.get('/', (req, res) => { + try { + expect(req.get('x-datadog-trace-id')).to.be.undefined + expect(req.get('x-datadog-parent-id')).to.be.undefined + + res.status(200).send() + + done() + } catch (e) { + done(e) + } + }) + + getPort().then(port => { + appListener = server(app, port, () => { + const req = http.request({ + port, + path: '/?X-Amz-Signature=abc123' + }) + + req.end() + }) + }) + }) + it('should run the callback in the parent context', done => { const app = express() diff --git a/packages/datadog-plugin-http2/src/client.js b/packages/datadog-plugin-http2/src/client.js index 8dc853c450d..4a60ee0b4db 100644 --- a/packages/datadog-plugin-http2/src/client.js +++ b/packages/datadog-plugin-http2/src/client.js @@ -62,7 +62,9 @@ class Http2ClientPlugin extends ClientPlugin { addHeaderTags(span, headers, HTTP_REQUEST_HEADERS, this.config) - this.tracer.inject(span, HTTP_HEADERS, headers) + if (!hasAmazonSignature(headers, path)) { + this.tracer.inject(span, HTTP_HEADERS, headers) + } message.parentStore = store message.currentStore = { ...store, span } @@ -131,6 +133,29 @@ function extractSessionDetails (authority, options) { return { protocol, port, host } } +function hasAmazonSignature (headers, path) { + if (headers) { + headers = 
Object.keys(headers) + .reduce((prev, next) => Object.assign(prev, { + [next.toLowerCase()]: headers[next] + }), {}) + + if (headers['x-amz-signature']) { + return true + } + + if ([].concat(headers['authorization']).some(startsWith('AWS4-HMAC-SHA256'))) { + return true + } + } + + return path && path.toLowerCase().indexOf('x-amz-signature=') !== -1 +} + +function startsWith (searchString) { + return value => String(value).startsWith(searchString) +} + function getStatusValidator (config) { if (typeof config.validateStatus === 'function') { return config.validateStatus diff --git a/packages/datadog-plugin-http2/test/client.spec.js b/packages/datadog-plugin-http2/test/client.spec.js index c19f6905222..89ec4cb1ab3 100644 --- a/packages/datadog-plugin-http2/test/client.spec.js +++ b/packages/datadog-plugin-http2/test/client.spec.js @@ -381,6 +381,139 @@ describe('Plugin', () => { }) }) + it('should skip injecting if the Authorization header contains an AWS signature', done => { + const app = (stream, headers) => { + try { + expect(headers['x-datadog-trace-id']).to.be.undefined + expect(headers['x-datadog-parent-id']).to.be.undefined + + stream.respond({ + ':status': 200 + }) + stream.end() + + done() + } catch (e) { + done(e) + } + } + + getPort().then(port => { + appListener = server(app, port, () => { + const headers = { + Authorization: 'AWS4-HMAC-SHA256 ...' + } + const client = http2 + .connect(`${protocol}://localhost:${port}`) + .on('error', done) + + const req = client.request(headers) + req.on('error', done) + + req.end() + }) + }) + }) + + it('should skip injecting if one of the Authorization headers contains an AWS signature', done => { + const app = (stream, headers) => { + try { + expect(headers['x-datadog-trace-id']).to.be.undefined + expect(headers['x-datadog-parent-id']).to.be.undefined + + stream.respond({ + ':status': 200 + }) + stream.end() + + done() + } catch (e) { + done(e) + } + } + + getPort().then(port => { + appListener = server(app, port, () => { + const headers = { + Authorization: ['AWS4-HMAC-SHA256 ...'] + } + const client = http2 + .connect(`${protocol}://localhost:${port}`) + .on('error', done) + + const req = client.request(headers) + req.on('error', done) + + req.end() + }) + }) + }) + + it('should skip injecting if the X-Amz-Signature header is set', done => { + const app = (stream, headers) => { + try { + expect(headers['x-datadog-trace-id']).to.be.undefined + expect(headers['x-datadog-parent-id']).to.be.undefined + + stream.respond({ + ':status': 200 + }) + stream.end() + + done() + } catch (e) { + done(e) + } + } + + getPort().then(port => { + appListener = server(app, port, () => { + const headers = { + 'X-Amz-Signature': 'abc123' + } + const client = http2 + .connect(`${protocol}://localhost:${port}`) + .on('error', done) + + const req = client.request(headers) + req.on('error', done) + + req.end() + }) + }) + }) + + it('should skip injecting if the X-Amz-Signature query param is set', done => { + const app = (stream, headers) => { + try { + expect(headers['x-datadog-trace-id']).to.be.undefined + expect(headers['x-datadog-parent-id']).to.be.undefined + + stream.respond({ + ':status': 200 + }) + stream.end() + + done() + } catch (e) { + done(e) + } + } + + getPort().then(port => { + appListener = server(app, port, () => { + const client = http2 + .connect(`${protocol}://localhost:${port}`) + .on('error', done) + + const req = client.request({ ':path': '/?X-Amz-Signature=abc123' }) + req.on('error', done) + + req.end() + }) + }) + }) + it('should run the 
callback in the parent context', done => { const app = (stream, headers) => { stream.respond({ From 9000ba15e694ef8c31d9b253b4082481b6e3fdb7 Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Mon, 20 Nov 2023 16:59:39 +0100 Subject: [PATCH 076/147] Fix enabling of timeline profiler: (#3807) * `DD_PROFILING_EXPERIMENTAL_TIMELINE_ENABLED=0` no longer enables it * can't set it by including a profiler type `'events'` directly * can only set it through either the env variable or options, with consistent results. --- packages/dd-trace/src/profiling/config.js | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/packages/dd-trace/src/profiling/config.js b/packages/dd-trace/src/profiling/config.js index ffcf58938be..2b52d3297d9 100644 --- a/packages/dd-trace/src/profiling/config.js +++ b/packages/dd-trace/src/profiling/config.js @@ -131,10 +131,12 @@ class Config { : getProfilers({ DD_PROFILING_HEAP_ENABLED, DD_PROFILING_WALLTIME_ENABLED, - DD_PROFILING_EXPERIMENTAL_TIMELINE_ENABLED, DD_PROFILING_PROFILERS }) + this.timelineEnabled = isTrue(coalesce(options.timelineEnabled, + DD_PROFILING_EXPERIMENTAL_TIMELINE_ENABLED, false)) + this.codeHotspotsEnabled = isTrue(coalesce(options.codeHotspotsEnabled, DD_PROFILING_CODEHOTSPOTS_ENABLED, DD_PROFILING_EXPERIMENTAL_CODEHOTSPOTS_ENABLED, false)) @@ -147,8 +149,7 @@ class Config { module.exports = { Config } function getProfilers ({ - DD_PROFILING_HEAP_ENABLED, DD_PROFILING_WALLTIME_ENABLED, - DD_PROFILING_PROFILERS, DD_PROFILING_EXPERIMENTAL_TIMELINE_ENABLED + DD_PROFILING_HEAP_ENABLED, DD_PROFILING_WALLTIME_ENABLED, DD_PROFILING_PROFILERS }) { // First consider "legacy" DD_PROFILING_PROFILERS env variable, defaulting to wall + space // Use a Set to avoid duplicates @@ -172,11 +173,6 @@ function getProfilers ({ } } - // Events profiler is a profiler for timeline events that goes with the wall - // profiler - if (profilers.has('wall') && DD_PROFILING_EXPERIMENTAL_TIMELINE_ENABLED) { - profilers.add('events') - } return [...profilers] } @@ -238,8 +234,6 @@ function getProfiler (name, options) { return new WallProfiler(options) case 'space': return new SpaceProfiler(options) - case 'events': - return new EventsProfiler(options) default: options.logger.error(`Unknown profiler "${name}"`) } @@ -257,6 +251,11 @@ function ensureProfilers (profilers, options) { } } + // Events profiler is a profiler for timeline events + if (options.timelineEnabled) { + profilers.push(new EventsProfiler(options)) + } + // Filter out any invalid profilers return profilers.filter(v => v) } From 1dd26d3a5e8b4e44e62d6fb6ff2cc4187db4dd48 Mon Sep 17 00:00:00 2001 From: Igor Unanua Date: Mon, 20 Nov 2023 17:23:35 +0100 Subject: [PATCH 077/147] Load appsec rules in appsec/rule_manager.js (#3805) * Load appsec rules in rule_manager.js instead of config.js --- packages/dd-trace/src/appsec/index.js | 2 +- packages/dd-trace/src/appsec/rule_manager.js | 15 ++++--- packages/dd-trace/src/config.js | 2 +- .../test/appsec/bad-formatted-rules.json | 10 +++++ .../test/appsec/blocking-actions-rules.json | 43 +++++++++++++++++++ packages/dd-trace/test/appsec/index.spec.js | 12 +++--- .../dd-trace/test/appsec/rule_manager.spec.js | 38 ++++++++-------- packages/dd-trace/test/config.spec.js | 17 +++----- 8 files changed, 95 insertions(+), 44 deletions(-) create mode 100644 packages/dd-trace/test/appsec/bad-formatted-rules.json create mode 100644 packages/dd-trace/test/appsec/blocking-actions-rules.json diff --git a/packages/dd-trace/src/appsec/index.js 
b/packages/dd-trace/src/appsec/index.js index dfd04ae3c0e..a6ecc62902b 100644 --- a/packages/dd-trace/src/appsec/index.js +++ b/packages/dd-trace/src/appsec/index.js @@ -36,7 +36,7 @@ function enable (_config) { setTemplates(_config) - RuleManager.applyRules(_config.appsec.rules, _config.appsec) + RuleManager.loadRules(_config.appsec) remoteConfig.enableWafUpdate(_config.appsec) diff --git a/packages/dd-trace/src/appsec/rule_manager.js b/packages/dd-trace/src/appsec/rule_manager.js index 4c91748adae..7f13d14bb34 100644 --- a/packages/dd-trace/src/appsec/rule_manager.js +++ b/packages/dd-trace/src/appsec/rule_manager.js @@ -1,5 +1,6 @@ 'use strict' +const fs = require('fs') const waf = require('./waf') const { ACKNOWLEDGED, ERROR } = require('./remote_config/apply_states') const blocking = require('./blocking') @@ -13,13 +14,15 @@ let appliedExclusions = new Map() let appliedCustomRules = new Map() let appliedActions = new Map() -function applyRules (rules, config) { - defaultRules = rules +function loadRules (config) { + defaultRules = config.rules + ? JSON.parse(fs.readFileSync(config.rules)) + : require('./recommended.json') - waf.init(rules, config) + waf.init(defaultRules, config) - if (rules.actions) { - blocking.updateBlockingConfiguration(rules.actions.find(action => action.id === 'block')) + if (defaultRules.actions) { + blocking.updateBlockingConfiguration(defaultRules.actions.find(action => action.id === 'block')) } } @@ -252,7 +255,7 @@ function clearAllRules () { } module.exports = { - applyRules, + loadRules, updateWafFromRC, clearAllRules } diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index b1800e21543..330275be3da 100644 --- a/packages/dd-trace/src/config.js +++ b/packages/dd-trace/src/config.js @@ -610,7 +610,7 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) this.tagsHeaderMaxLength = parseInt(DD_TRACE_X_DATADOG_TAGS_MAX_LENGTH) this.appsec = { enabled: DD_APPSEC_ENABLED, - rules: DD_APPSEC_RULES ? 
safeJsonParse(maybeFile(DD_APPSEC_RULES)) : require('./appsec/recommended.json'), + rules: DD_APPSEC_RULES, customRulesProvided: !!DD_APPSEC_RULES, rateLimit: DD_APPSEC_TRACE_RATE_LIMIT, wafTimeout: DD_APPSEC_WAF_TIMEOUT, diff --git a/packages/dd-trace/test/appsec/bad-formatted-rules.json b/packages/dd-trace/test/appsec/bad-formatted-rules.json new file mode 100644 index 00000000000..f74a9a6e14a --- /dev/null +++ b/packages/dd-trace/test/appsec/bad-formatted-rules.json @@ -0,0 +1,10 @@ +{ + "version": "2.2", + "metadata": { + "rules_version": "1.5.0" + }, + "rules": [ + { + + ] +} diff --git a/packages/dd-trace/test/appsec/blocking-actions-rules.json b/packages/dd-trace/test/appsec/blocking-actions-rules.json new file mode 100644 index 00000000000..76ec63a10d5 --- /dev/null +++ b/packages/dd-trace/test/appsec/blocking-actions-rules.json @@ -0,0 +1,43 @@ +{ + "version": "2.2", + "metadata": { + "rules_version": "1.9.0" + }, + "rules": [ + { + "id": "blk-001-001", + "name": "Block IP Addresses", + "tags": { + "type": "block_ip", + "category": "security_response" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "http.client_ip" + } + ], + "data": "blocked_ips" + }, + "operator": "ip_match" + } + ], + "transformers": [], + "on_match": [ + "block" + ] + } + ], + "actions": [ + { + "id": "block", + "otherParam": "other" + }, + { + "id": "otherId", + "moreParams": "more" + } + ] +} diff --git a/packages/dd-trace/test/appsec/index.spec.js b/packages/dd-trace/test/appsec/index.spec.js index 04f5c597a51..7cc4dc070e9 100644 --- a/packages/dd-trace/test/appsec/index.spec.js +++ b/packages/dd-trace/test/appsec/index.spec.js @@ -1,5 +1,6 @@ 'use strict' +const fs = require('fs') const proxyquire = require('proxyquire') const waf = require('../../src/appsec/waf') const RuleManager = require('../../src/appsec/rule_manager') @@ -38,7 +39,7 @@ describe('AppSec Index', () => { config = { appsec: { enabled: true, - rules: RULES, + rules: './path/rules.json', rateLimit: 42, wafTimeout: 42, obfuscatorKeyRegex: '.*', @@ -83,8 +84,9 @@ describe('AppSec Index', () => { './telemetry': appsecTelemetry }) + sinon.stub(fs, 'readFileSync').returns(JSON.stringify(RULES)) sinon.stub(waf, 'init').callThrough() - sinon.stub(RuleManager, 'applyRules') + sinon.stub(RuleManager, 'loadRules') sinon.stub(Reporter, 'setRateLimit') sinon.stub(incomingHttpRequestStart, 'subscribe') sinon.stub(incomingHttpRequestEnd, 'subscribe') @@ -101,7 +103,7 @@ describe('AppSec Index', () => { AppSec.enable(config) expect(blocking.setTemplates).to.have.been.calledOnceWithExactly(config) - expect(RuleManager.applyRules).to.have.been.calledOnceWithExactly(RULES, config.appsec) + expect(RuleManager.loadRules).to.have.been.calledOnceWithExactly(config.appsec) expect(Reporter.setRateLimit).to.have.been.calledOnceWithExactly(42) expect(incomingHttpRequestStart.subscribe) .to.have.been.calledOnceWithExactly(AppSec.incomingHttpStartTranslator) @@ -109,10 +111,10 @@ describe('AppSec Index', () => { }) it('should log when enable fails', () => { - RuleManager.applyRules.restore() + RuleManager.loadRules.restore() const err = new Error('Invalid Rules') - sinon.stub(RuleManager, 'applyRules').throws(err) + sinon.stub(RuleManager, 'loadRules').throws(err) AppSec.enable(config) diff --git a/packages/dd-trace/test/appsec/rule_manager.spec.js b/packages/dd-trace/test/appsec/rule_manager.spec.js index 7d989532871..b2162b42a82 100644 --- a/packages/dd-trace/test/appsec/rule_manager.spec.js +++ 
b/packages/dd-trace/test/appsec/rule_manager.spec.js @@ -1,6 +1,8 @@ 'use strict' -const { applyRules, clearAllRules, updateWafFromRC } = require('../../src/appsec/rule_manager') +const fs = require('fs') +const path = require('path') +const { loadRules, clearAllRules, updateWafFromRC } = require('../../src/appsec/rule_manager') const Config = require('../../src/config') const { ACKNOWLEDGED } = require('../../src/appsec/remote_config/apply_states') @@ -27,30 +29,21 @@ describe('AppSec Rule Manager', () => { clearAllRules() }) - describe('applyRules', () => { + describe('loadRules', () => { it('should call waf init with proper params', () => { - applyRules(rules, config.appsec) + loadRules(config.appsec) expect(waf.init).to.have.been.calledOnceWithExactly(rules, config.appsec) expect(blocking.updateBlockingConfiguration).not.to.have.been.called }) it('should call updateBlockingConfiguration with proper params', () => { - const testRules = { - ...rules, - actions: [ - { - id: 'block', - otherParam: 'other' - }, - { - id: 'otherId', - moreParams: 'more' - } - ] - } + const rulesPath = path.join(__dirname, './blocking-actions-rules.json') + const testRules = JSON.parse(fs.readFileSync(rulesPath)) - applyRules(testRules, config.appsec) + config.appsec.rules = rulesPath + + loadRules(config.appsec) expect(waf.init).to.have.been.calledOnceWithExactly(testRules, config.appsec) expect(blocking.updateBlockingConfiguration).to.have.been.calledOnceWithExactly({ @@ -61,14 +54,17 @@ describe('AppSec Rule Manager', () => { it('should throw if null/undefined are passed', () => { // TODO: fix the exception thrown in the waf or catch it in rule_manager? - expect(() => { applyRules(undefined, config.appsec) }).to.throw() - expect(() => { applyRules(null, config.appsec) }).to.throw() + config.appsec.rules = './not/existing/file.json' + expect(() => { loadRules(config.appsec) }).to.throw() + + config.appsec.rules = './bad-formatted-rules.json' + expect(() => { loadRules(config.appsec) }).to.throw() }) }) describe('clearAllRules', () => { it('should call clear method on all applied rules', () => { - applyRules(rules, config.appsec) + loadRules(config.appsec) expect(waf.init).to.have.been.calledOnce clearAllRules() @@ -288,7 +284,7 @@ describe('AppSec Rule Manager', () => { describe('ASM_DD', () => { beforeEach(() => { - applyRules(rules, config.appsec) + loadRules(config.appsec) }) it('should apply new rules', () => { diff --git a/packages/dd-trace/test/config.spec.js b/packages/dd-trace/test/config.spec.js index 877729e0ebb..eeffd13c09e 100644 --- a/packages/dd-trace/test/config.spec.js +++ b/packages/dd-trace/test/config.spec.js @@ -17,9 +17,7 @@ describe('Config', () => { let osType const RECOMMENDED_JSON_PATH = require.resolve('../src/appsec/recommended.json') - const RECOMMENDED_JSON = require(RECOMMENDED_JSON_PATH) const RULES_JSON_PATH = require.resolve('./fixtures/config/appsec-rules.json') - const RULES_JSON = require(RULES_JSON_PATH) const BLOCKED_TEMPLATE_HTML_PATH = require.resolve('./fixtures/config/appsec-blocked-template.html') const BLOCKED_TEMPLATE_HTML = readFileSync(BLOCKED_TEMPLATE_HTML_PATH, { encoding: 'utf8' }) const BLOCKED_TEMPLATE_JSON_PATH = require.resolve('./fixtures/config/appsec-blocked-template.json') @@ -101,7 +99,7 @@ describe('Config', () => { expect(config).to.have.nested.property('experimental.exporter', undefined) expect(config).to.have.nested.property('experimental.enableGetRumData', false) expect(config).to.have.nested.property('appsec.enabled', undefined) - 
expect(config).to.have.nested.property('appsec.rules', RECOMMENDED_JSON) + expect(config).to.have.nested.property('appsec.rules', undefined) expect(config).to.have.nested.property('appsec.customRulesProvided', false) expect(config).to.have.nested.property('appsec.rateLimit', 100) expect(config).to.have.nested.property('appsec.wafTimeout', 5e3) @@ -277,7 +275,7 @@ describe('Config', () => { expect(config).to.have.nested.property('experimental.exporter', 'log') expect(config).to.have.nested.property('experimental.enableGetRumData', true) expect(config).to.have.nested.property('appsec.enabled', true) - expect(config).to.have.nested.deep.property('appsec.rules', RULES_JSON) + expect(config).to.have.nested.property('appsec.rules', RULES_JSON_PATH) expect(config).to.have.nested.property('appsec.customRulesProvided', true) expect(config).to.have.nested.property('appsec.rateLimit', 42) expect(config).to.have.nested.property('appsec.wafTimeout', 42) @@ -758,7 +756,7 @@ describe('Config', () => { expect(config).to.have.nested.property('experimental.exporter', 'agent') expect(config).to.have.nested.property('experimental.enableGetRumData', false) expect(config).to.have.nested.property('appsec.enabled', true) - expect(config).to.have.nested.deep.property('appsec.rules', RULES_JSON) + expect(config).to.have.nested.property('appsec.rules', RULES_JSON_PATH) expect(config).to.have.nested.property('appsec.customRulesProvided', true) expect(config).to.have.nested.property('appsec.rateLimit', 42) expect(config).to.have.nested.property('appsec.wafTimeout', 42) @@ -813,7 +811,7 @@ describe('Config', () => { expect(config).to.have.deep.property('appsec', { enabled: true, - rules: RECOMMENDED_JSON, + rules: undefined, customRulesProvided: false, rateLimit: 42, wafTimeout: 42, @@ -1095,19 +1093,18 @@ describe('Config', () => { const config = new Config({ appsec: { enabled: true, - rules: 'DOES_NOT_EXIST.json', + rules: 'path/to/rules.json', blockedTemplateHtml: 'DOES_NOT_EXIST.html', blockedTemplateJson: 'DOES_NOT_EXIST.json' } }) - expect(log.error).to.be.callCount(3) + expect(log.error).to.be.callCount(2) expect(log.error.firstCall).to.have.been.calledWithExactly(error) expect(log.error.secondCall).to.have.been.calledWithExactly(error) - expect(log.error.thirdCall).to.have.been.calledWithExactly(error) expect(config.appsec.enabled).to.be.true - expect(config.appsec.rules).to.be.undefined + expect(config.appsec.rules).to.eq('path/to/rules.json') expect(config.appsec.customRulesProvided).to.be.true expect(config.appsec.blockedTemplateHtml).to.be.undefined expect(config.appsec.blockedTemplateJson).to.be.undefined From 0457248c8550e15a170e35d3999a12b1d642984d Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Mon, 20 Nov 2023 17:29:49 +0100 Subject: [PATCH 078/147] PROF-8545: Memoize web tags in all ancestors (#3792) * Rewrote recursively for clarity. Changed nomenclature from "cached" to "memoized". Now memoizing in all ancestor spans; ideally the lookup will stop at the immediate parent as it already has its web tags memoized. 
* Fix tests; span type and resource name must be set before the span was activated --- integration-tests/profiler/codehotspots.js | 6 +- .../dd-trace/src/profiling/profilers/wall.js | 66 ++++++++++--------- 2 files changed, 38 insertions(+), 34 deletions(-) diff --git a/integration-tests/profiler/codehotspots.js b/integration-tests/profiler/codehotspots.js index fe40b891363..c72342a7adb 100644 --- a/integration-tests/profiler/codehotspots.js +++ b/integration-tests/profiler/codehotspots.js @@ -18,13 +18,11 @@ function busyLoop () { let counter = 0 function runBusySpans () { - tracer.trace('x' + counter, (span, done) => { - span.setTag('span.type', 'web') - span.setTag('resource.name', `endpoint-${counter}`) + tracer.trace('x' + counter, { type: 'web', resource: `endpoint-${counter}` }, (_, done) => { setImmediate(() => { for (let i = 0; i < 3; ++i) { const z = i - tracer.trace('y' + i, (span2, done2) => { + tracer.trace('y' + i, (_, done2) => { setTimeout(() => { busyLoop() done2() diff --git a/packages/dd-trace/src/profiling/profilers/wall.js b/packages/dd-trace/src/profiling/profilers/wall.js index ed4d20aab51..c6ccacfde4b 100644 --- a/packages/dd-trace/src/profiling/profilers/wall.js +++ b/packages/dd-trace/src/profiling/profilers/wall.js @@ -15,7 +15,7 @@ const spanFinishCh = dc.channel('dd-trace:span:finish') const profilerTelemetryMetrics = telemetryMetrics.manager.namespace('profilers') const threadName = `${threadNamePrefix} Event Loop` -const CachedWebTags = Symbol('NativeWallProfiler.CachedWebTags') +const MemoizedWebTags = Symbol('NativeWallProfiler.MemoizedWebTags') let kSampleCount @@ -61,6 +61,38 @@ function endpointNameFromTags (tags) { ].filter(v => v).join(' ') } +function getWebTags (startedSpans, i, span) { + // Are web tags for this span already memoized? + const memoizedWebTags = span[MemoizedWebTags] + if (memoizedWebTags !== undefined) { + return memoizedWebTags + } + // No, we'll have to memoize a new value + function memoize (tags) { + span[MemoizedWebTags] = tags + return tags + } + // Is this span itself a web span? + const context = span.context() + const tags = context._tags + if (isWebServerSpan(tags)) { + return memoize(tags) + } + // It isn't. Get parent's web tags (memoize them too recursively.) + // There might be several webspans, for example with next.js, http plugin creates the first span + // and then next.js plugin creates a child span, and this child span has the correct endpoint + // information. That's why we always use the tags of the closest ancestor web span. + const parentId = context._parentId + while (--i >= 0) { + const ispan = startedSpans[i] + if (ispan.context()._spanId === parentId) { + return memoize(getWebTags(startedSpans, i, ispan)) + } + } + // Local root span with no web span + return memoize(null) +} + class NativeWallProfiler { constructor (options = {}) { this.type = 'wall' @@ -149,33 +181,7 @@ class NativeWallProfiler { const startedSpans = getStartedSpans(context) this._lastStartedSpans = startedSpans if (this._endpointCollectionEnabled) { - const cachedWebTags = span[CachedWebTags] - if (cachedWebTags === undefined) { - let found = false - // Find the first webspan starting from the end: - // There might be several webspans, for example with next.js, http plugin creates a first span - // and then next.js plugin creates a child span, and this child span has the correct endpoint information. 
- let nextSpanId = context._spanId - for (let i = startedSpans.length - 1; i >= 0; i--) { - const nextContext = startedSpans[i].context() - if (nextContext._spanId === nextSpanId) { - const tags = nextContext._tags - if (isWebServerSpan(tags)) { - this._lastWebTags = tags - span[CachedWebTags] = tags - found = true - break - } - nextSpanId = nextContext._parentId - } - } - if (!found) { - this._lastWebTags = undefined - span[CachedWebTags] = null // cache negative lookup result - } - } else { - this._lastWebTags = cachedWebTags - } + this._lastWebTags = getWebTags(startedSpans, startedSpans.length, span) } } else { this._lastStartedSpans = undefined @@ -204,8 +210,8 @@ class NativeWallProfiler { } _spanFinished (span) { - if (span[CachedWebTags]) { - span[CachedWebTags] = undefined + if (span[MemoizedWebTags]) { + span[MemoizedWebTags] = undefined } } From 64106495cfa553139bbb588f8782f6d37ba424b8 Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Mon, 20 Nov 2023 19:33:50 +0100 Subject: [PATCH 079/147] PROF-8649: Emit wall sample timestamps even when code hotspots aren't used (#3808) * Allow wall samples on timeline even when code hotspots are not enabled * Node 21 won't emit a single GC event for very short runs (such is the profiler integration test), we must handle it --- integration-tests/profiler.spec.js | 3 +- .../src/profiling/profilers/events.js | 5 + .../dd-trace/src/profiling/profilers/wall.js | 94 +++++++++++-------- 3 files changed, 64 insertions(+), 38 deletions(-) diff --git a/integration-tests/profiler.spec.js b/integration-tests/profiler.spec.js index 79cd7efe09b..cb65228a2e4 100644 --- a/integration-tests/profiler.spec.js +++ b/integration-tests/profiler.spec.js @@ -82,7 +82,8 @@ describe('profiler', () => { DD_PROFILING_EXPORTERS: 'file', DD_PROFILING_ENABLED: 1, DD_PROFILING_CODEHOTSPOTS_ENABLED: 1, - DD_PROFILING_ENDPOINT_COLLECTION_ENABLED: 1 + DD_PROFILING_ENDPOINT_COLLECTION_ENABLED: 1, + DD_PROFILING_EXPERIMENTAL_TIMELINE_ENABLED: 1 } }) diff --git a/packages/dd-trace/src/profiling/profilers/events.js b/packages/dd-trace/src/profiling/profilers/events.js index 508c6df8388..ac0fd51ce74 100644 --- a/packages/dd-trace/src/profiling/profilers/events.js +++ b/packages/dd-trace/src/profiling/profilers/events.js @@ -46,6 +46,11 @@ class EventsProfiler { } profile () { + if (this.entries.length === 0) { + // No events in the period; don't produce a profile + return null + } + const stringTable = new StringTable() const timestampLabelKey = stringTable.dedup(END_TIMESTAMP) const kindLabelKey = stringTable.dedup('gc type') diff --git a/packages/dd-trace/src/profiling/profilers/wall.js b/packages/dd-trace/src/profiling/profilers/wall.js index c6ccacfde4b..991a44efd0a 100644 --- a/packages/dd-trace/src/profiling/profilers/wall.js +++ b/packages/dd-trace/src/profiling/profilers/wall.js @@ -28,28 +28,6 @@ function getStartedSpans (context) { return context._trace.started } -function generateLabels ({ context: { spanId, rootSpanId, webTags, endpoint }, timestamp }) { - const labels = { - [THREAD_NAME]: threadName, - // Incoming timestamps are in microseconds, we emit nanos. 
- [END_TIMESTAMP]: timestamp * 1000n - } - if (spanId) { - labels['span id'] = spanId - } - if (rootSpanId) { - labels['local root span id'] = rootSpanId - } - if (webTags && Object.keys(webTags).length !== 0) { - labels['trace endpoint'] = endpointNameFromTags(webTags) - } else if (endpoint) { - // fallback to endpoint computed when sample was taken - labels['trace endpoint'] = endpoint - } - - return labels -} - function isWebServerSpan (tags) { return tags[SPAN_TYPE] === WEB } @@ -100,14 +78,30 @@ class NativeWallProfiler { this._flushIntervalMillis = options.flushInterval || 60 * 1e3 // 60 seconds this._codeHotspotsEnabled = !!options.codeHotspotsEnabled this._endpointCollectionEnabled = !!options.endpointCollectionEnabled - this._withContexts = this._codeHotspotsEnabled || this._endpointCollectionEnabled + this._timelineEnabled = !!options.timelineEnabled + // We need to capture span data into the sample context for either code hotspots + // or endpoint collection. + this._captureSpanData = this._codeHotspotsEnabled || this._endpointCollectionEnabled + // We need to run the pprof wall profiler with sample contexts if we're either + // capturing span data or timeline is enabled (so we need sample timestamps, and for now + // timestamps require the sample contexts feature in the pprof wall profiler.) + this._withContexts = this._captureSpanData || this._timelineEnabled this._v8ProfilerBugWorkaroundEnabled = !!options.v8ProfilerBugWorkaroundEnabled this._mapper = undefined this._pprof = undefined - // Bind to this so the same value can be used to unsubscribe later - this._enter = this._enter.bind(this) - this._spanFinished = this._spanFinished.bind(this) + // Bind these to this so they can be used as callbacks + if (this._withContexts) { + if (this._captureSpanData) { + this._enter = this._enter.bind(this) + this._spanFinished = this._spanFinished.bind(this) + } + this._generateLabels = this._generateLabels.bind(this) + } else { + // Explicitly assigning, to express the intent that this is meant to be + // undefined when passed to pprof.time.stop() when not using sample contexts. + this._generateLabels = undefined + } this._logger = options.logger this._started = false } @@ -145,17 +139,20 @@ class NativeWallProfiler { }) if (this._withContexts) { - this._profilerState = this._pprof.time.getState() this._currentContext = {} this._pprof.time.setContext(this._currentContext) - this._lastSpan = undefined - this._lastStartedSpans = undefined - this._lastWebTags = undefined - this._lastSampleCount = 0 - beforeCh.subscribe(this._enter) - enterCh.subscribe(this._enter) - spanFinishCh.subscribe(this._spanFinished) + if (this._captureSpanData) { + this._profilerState = this._pprof.time.getState() + this._lastSpan = undefined + this._lastStartedSpans = undefined + this._lastWebTags = undefined + this._lastSampleCount = 0 + + beforeCh.subscribe(this._enter) + enterCh.subscribe(this._enter) + spanFinishCh.subscribe(this._spanFinished) + } } this._started = true @@ -227,12 +224,12 @@ class NativeWallProfiler { _stop (restart) { if (!this._started) return - if (this._withContexts) { + if (this._captureSpanData) { // update last sample context if needed this._enter() this._lastSampleCount = 0 } - const profile = this._pprof.time.stop(restart, this._withContexts ? 
generateLabels : undefined) + const profile = this._pprof.time.stop(restart, this._generateLabels) if (restart) { const v8BugDetected = this._pprof.time.v8ProfilerStuckEventLoopDetected() if (v8BugDetected !== 0) { @@ -242,6 +239,29 @@ class NativeWallProfiler { return profile } + _generateLabels ({ context: { spanId, rootSpanId, webTags, endpoint }, timestamp }) { + const labels = this._timelineEnabled ? { + [THREAD_NAME]: threadName, + // Incoming timestamps are in microseconds, we emit nanos. + [END_TIMESTAMP]: timestamp * 1000n + } : {} + + if (spanId) { + labels['span id'] = spanId + } + if (rootSpanId) { + labels['local root span id'] = rootSpanId + } + if (webTags && Object.keys(webTags).length !== 0) { + labels['trace endpoint'] = endpointNameFromTags(webTags) + } else if (endpoint) { + // fallback to endpoint computed when sample was taken + labels['trace endpoint'] = endpoint + } + + return labels + } + profile () { return this._stop(true) } @@ -254,7 +274,7 @@ class NativeWallProfiler { if (!this._started) return const profile = this._stop(false) - if (this._withContexts) { + if (this._captureSpanData) { beforeCh.unsubscribe(this._enter) enterCh.unsubscribe(this._enter) spanFinishCh.unsubscribe(this._spanFinished) From 2c35dde084d7ae9616afcdcc473a3bb525707c70 Mon Sep 17 00:00:00 2001 From: Roch Devost Date: Mon, 20 Nov 2023 13:44:20 -0500 Subject: [PATCH 080/147] always propagate tracestate when tracecontext is configured (#3810) * always propagate tracestate when tracecontext is configured * add support for DD_TRACE_PROPAGATION_EXTRACT_FIRST --- packages/dd-trace/src/config.js | 5 ++ packages/dd-trace/src/id.js | 12 +++++ .../src/opentracing/propagation/text_map.js | 19 ++++++-- .../opentracing/propagation/text_map.spec.js | 47 +++++++++++++++++++ 4 files changed, 78 insertions(+), 5 deletions(-) diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index 330275be3da..2ce3a8f3bf5 100644 --- a/packages/dd-trace/src/config.js +++ b/packages/dd-trace/src/config.js @@ -309,6 +309,10 @@ class Config { options.tracePropagationStyle, defaultPropagationStyle ) + const DD_TRACE_PROPAGATION_EXTRACT_FIRST = coalesce( + process.env.DD_TRACE_PROPAGATION_EXTRACT_FIRST, + false + ) const DD_TRACE_RUNTIME_ID_ENABLED = coalesce( options.experimental && options.experimental.runtimeId, process.env.DD_TRACE_EXPERIMENTAL_RUNTIME_ID_ENABLED, @@ -579,6 +583,7 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) inject: DD_TRACE_PROPAGATION_STYLE_INJECT, extract: DD_TRACE_PROPAGATION_STYLE_EXTRACT } + this.tracePropagationExtractFirst = isTrue(DD_TRACE_PROPAGATION_EXTRACT_FIRST) this.experimental = { runtimeId: isTrue(DD_TRACE_RUNTIME_ID_ENABLED), exporter: DD_TRACE_EXPORTER, diff --git a/packages/dd-trace/src/id.js b/packages/dd-trace/src/id.js index f63964d74e3..9f437f1fa1a 100644 --- a/packages/dd-trace/src/id.js +++ b/packages/dd-trace/src/id.js @@ -42,6 +42,18 @@ class Identifier { toJSON () { return this.toString() } + + equals (other) { + const length = this._buffer.length + const otherLength = other._buffer.length + + // Only compare the bytes available in both IDs. + for (let i = length, j = otherLength; i >= 0 && j >= 0; i--, j--) { + if (this._buffer[i] !== other._buffer[j]) return false + } + + return true + } } // Create a buffer, using an optional hexadecimal value if provided. 
diff --git a/packages/dd-trace/src/opentracing/propagation/text_map.js b/packages/dd-trace/src/opentracing/propagation/text_map.js index 9335231c845..20a257bb61a 100644 --- a/packages/dd-trace/src/opentracing/propagation/text_map.js +++ b/packages/dd-trace/src/opentracing/propagation/text_map.js @@ -236,11 +236,20 @@ class TextMapPropagator { _extractDatadogContext (carrier) { const spanContext = this._extractGenericContext(carrier, traceKey, spanKey, 10) - if (spanContext) { - this._extractOrigin(carrier, spanContext) - this._extractBaggageItems(carrier, spanContext) - this._extractSamplingPriority(carrier, spanContext) - this._extractTags(carrier, spanContext) + if (!spanContext) return spanContext + + this._extractOrigin(carrier, spanContext) + this._extractBaggageItems(carrier, spanContext) + this._extractSamplingPriority(carrier, spanContext) + this._extractTags(carrier, spanContext) + + if (this._config.tracePropagationExtractFirst) return spanContext + + const tc = this._extractTraceparentContext(carrier) + + if (tc && spanContext._traceId.equals(tc._traceId)) { + spanContext._traceparent = tc._traceparent + spanContext._tracestate = tc._tracestate } return spanContext diff --git a/packages/dd-trace/test/opentracing/propagation/text_map.spec.js b/packages/dd-trace/test/opentracing/propagation/text_map.spec.js index 1fd69c54c81..469126010f0 100644 --- a/packages/dd-trace/test/opentracing/propagation/text_map.spec.js +++ b/packages/dd-trace/test/opentracing/propagation/text_map.spec.js @@ -464,6 +464,40 @@ describe('TextMapPropagator', () => { expect(first._spanId.toString(16)).to.equal(spanId) }) + it('should always extract tracestate from tracecontext when trace IDs match', () => { + textMap['traceparent'] = '00-0000000000000000000000000000007B-0000000000000456-01' + textMap['tracestate'] = 'other=bleh,dd=t.foo_bar_baz_:abc_!@#$%^&*()_+`-~;s:2;o:foo;t.dm:-4' + config.tracePropagationStyle.extract = ['datadog', 'tracecontext'] + + const carrier = textMap + const spanContext = propagator.extract(carrier) + + expect(spanContext._tracestate.get('other')).to.equal('bleh') + }) + + it(`should not extract tracestate from tracecontext when trace IDs don't match`, () => { + textMap['traceparent'] = '00-00000000000000000000000000000789-0000000000000456-01' + textMap['tracestate'] = 'other=bleh,dd=t.foo_bar_baz_:abc_!@#$%^&*()_+`-~;s:2;o:foo;t.dm:-4' + config.tracePropagationStyle.extract = ['datadog', 'tracecontext'] + + const carrier = textMap + const spanContext = propagator.extract(carrier) + + expect(spanContext._tracestate).to.be.undefined + }) + + it(`should not extract tracestate from tracecontext when configured to extract first`, () => { + textMap['traceparent'] = '00-0000000000000000000000000000007B-0000000000000456-01' + textMap['tracestate'] = 'other=bleh,dd=t.foo_bar_baz_:abc_!@#$%^&*()_+`-~;s:2;o:foo;t.dm:-4' + config.tracePropagationStyle.extract = ['datadog', 'tracecontext'] + config.tracePropagationExtractFirst = true + + const carrier = textMap + const spanContext = propagator.extract(carrier) + + expect(spanContext._tracestate).to.be.undefined + }) + describe('with B3 propagation as multiple headers', () => { beforeEach(() => { config.tracePropagationStyle.extract = ['b3multi'] @@ -746,6 +780,19 @@ describe('TextMapPropagator', () => { expect(spanContext._trace.tags['_dd.p.dm']).to.eql('-4') }) + it('should propagate other vendors', () => { + textMap['traceparent'] = '01-1111aaaa2222bbbb3333cccc4444dddd-5555eeee6666ffff-01' + textMap['tracestate'] = 
'other=bleh,dd=t.foo_bar_baz_:abc_!@#$%^&*()_+`-~;s:2;o:foo;t.dm:-4' + config.tracePropagationStyle.extract = ['tracecontext'] + + const carrier = {} + const spanContext = propagator.extract(textMap) + + propagator.inject(spanContext, carrier) + + expect(carrier['tracestate']).to.include('other=bleh') + }) + it('should fix _dd.p.dm if invalid (non-hyphenated) input is received', () => { textMap['traceparent'] = '01-1111aaaa2222bbbb3333cccc4444dddd-5555eeee6666ffff-01' textMap['tracestate'] = 'other=bleh,dd=t.foo_bar_baz_:abc_!@#$%^&*()_+`-~;s:2;o:foo;t.dm:4' From c02378d892e83f8b0e891abdf7fa3b1b19709b3e Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Mon, 20 Nov 2023 21:02:31 +0100 Subject: [PATCH 081/147] `type` is not supported in Node 14, only `entryTypes` (#3816) --- packages/dd-trace/src/profiling/profilers/events.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/dd-trace/src/profiling/profilers/events.js b/packages/dd-trace/src/profiling/profilers/events.js index ac0fd51ce74..83174c014de 100644 --- a/packages/dd-trace/src/profiling/profilers/events.js +++ b/packages/dd-trace/src/profiling/profilers/events.js @@ -36,7 +36,7 @@ class EventsProfiler { this._observer = new PerformanceObserver(add.bind(this)) } // Currently only support GC - this._observer.observe({ type: 'gc' }) + this._observer.observe({ entryTypes: ['gc'] }) } stop () { From acfa49a2c06f9c6a57b2928e7765dc3d1c93507f Mon Sep 17 00:00:00 2001 From: Sam Brenner <106700075+sabrenner@users.noreply.github.com> Date: Tue, 21 Nov 2023 12:48:43 -0500 Subject: [PATCH 082/147] [core] Collapse Next.js Static Resources to Reduce Cardinality (#3809) collapse Next.js static resource path names --- packages/datadog-instrumentations/src/next.js | 4 ++- packages/datadog-plugin-next/src/index.js | 14 ++++---- .../datadog-plugin-next/test/index.spec.js | 32 ++++++++++++++++--- 3 files changed, 37 insertions(+), 13 deletions(-) diff --git a/packages/datadog-instrumentations/src/next.js b/packages/datadog-instrumentations/src/next.js index 21a9f1187e3..b4406ba60b0 100644 --- a/packages/datadog-instrumentations/src/next.js +++ b/packages/datadog-instrumentations/src/next.js @@ -144,7 +144,9 @@ function instrument (req, res, handler) { function wrapServeStatic (serveStatic) { return function (req, res, path) { return instrument(req, res, () => { - if (pageLoadChannel.hasSubscribers && path) pageLoadChannel.publish({ page: path }) + if (pageLoadChannel.hasSubscribers && path) { + pageLoadChannel.publish({ page: path, isStatic: true }) + } return serveStatic.apply(this, arguments) }) diff --git a/packages/datadog-plugin-next/src/index.js b/packages/datadog-plugin-next/src/index.js index d0691bb2977..4bd1c21f984 100644 --- a/packages/datadog-plugin-next/src/index.js +++ b/packages/datadog-plugin-next/src/index.js @@ -65,7 +65,7 @@ class NextPlugin extends ServerPlugin { span.finish() } - pageLoad ({ page, isAppPath = false }) { + pageLoad ({ page, isAppPath = false, isStatic = false }) { const store = storage.getStore() if (!store) return @@ -82,12 +82,12 @@ class NextPlugin extends ServerPlugin { // remove ending /route or /page for appDir projects if (isAppPath) page = page.substring(0, page.lastIndexOf('/')) - // This is for static files whose 'page' includes the whole file path - // For normal page matches, like /api/hello/[name] and a req.url like /api/hello/world, - // nothing should happen - // For page matches like /User/something/public/text.txt and req.url like /text.txt, - // it should disregard the 
extra absolute path Next.js sometimes sets - if (page.includes(req.url)) page = req.url + // handle static resource + if (isStatic) { + page = req.url.includes('_next/static') + ? '/_next/static/*' + : '/public/*' + } span.addTags({ [COMPONENT]: this.constructor.id, diff --git a/packages/datadog-plugin-next/test/index.spec.js b/packages/datadog-plugin-next/test/index.spec.js index afee2b19817..d03668bcb2a 100644 --- a/packages/datadog-plugin-next/test/index.spec.js +++ b/packages/datadog-plugin-next/test/index.spec.js @@ -6,7 +6,7 @@ const axios = require('axios') const getPort = require('get-port') const { execSync, spawn } = require('child_process') const agent = require('../../dd-trace/test/plugins/agent') -const { writeFileSync } = require('fs') +const { writeFileSync, readdirSync } = require('fs') const { satisfies } = require('semver') const { DD_MAJOR } = require('../../../version') const { rawExpectedSchema } = require('./naming') @@ -347,7 +347,7 @@ describe('Plugin', function () { }) describe('for static files', () => { - it('should do automatic instrumentation', done => { + it('should do automatic instrumentation for assets', done => { agent .use(traces => { const spans = traces[0] @@ -355,7 +355,7 @@ describe('Plugin', function () { expect(spans[1]).to.have.property('name', 'next.request') expect(spans[1]).to.have.property('service', 'test') expect(spans[1]).to.have.property('type', 'web') - expect(spans[1]).to.have.property('resource', 'GET /test.txt') + expect(spans[1]).to.have.property('resource', 'GET /public/*') expect(spans[1].meta).to.have.property('span.kind', 'server') expect(spans[1].meta).to.have.property('http.method', 'GET') expect(spans[1].meta).to.have.property('http.status_code', '200') @@ -369,13 +369,35 @@ describe('Plugin', function () { .catch(done) }) + it('should do automatic instrumentation for static chunks', done => { + // get first static chunk file programatically + const file = readdirSync(`${__dirname}/.next/static/chunks`)[0] + + agent + .use(traces => { + const spans = traces[0] + + expect(spans[1]).to.have.property('name', 'next.request') + expect(spans[1]).to.have.property('resource', 'GET /_next/static/*') + expect(spans[1].meta).to.have.property('http.method', 'GET') + expect(spans[1].meta).to.have.property('http.status_code', '200') + expect(spans[1].meta).to.have.property('component', 'next') + }) + .then(done) + .catch(done) + + axios + .get(`http://127.0.0.1:${port}/_next/static/chunks/${file}`) + .catch(done) + }) + it('should pass resource path to parent span', done => { agent .use(traces => { const spans = traces[0] expect(spans[0]).to.have.property('name', 'web.request') - expect(spans[0]).to.have.property('resource', 'GET /test.txt') + expect(spans[0]).to.have.property('resource', 'GET /public/*') }) .then(done) .catch(done) @@ -505,7 +527,7 @@ describe('Plugin', function () { const standaloneTests = [ ['api', '/api/hello/world', 'GET /api/hello/[name]'], ['pages', '/hello/world', 'GET /hello/[name]'], - ['static files', '/test.txt', 'GET /test.txt'] + ['static files', '/test.txt', 'GET /public/*'] ] standaloneTests.forEach(([test, resource, expectedResource]) => { From c016298585fcd93a4a1d432dd20ab2f1b93bad75 Mon Sep 17 00:00:00 2001 From: Julio Gonzalez <107922352+hoolioh@users.noreply.github.com> Date: Wed, 22 Nov 2023 12:04:11 +0100 Subject: [PATCH 083/147] API Security integration (#3685) * Config variables. * Add support for schema reporting. * Add support for schema extraction on request addresses. 
* Pass response's status code as string. --------- Co-authored-by: Ugaitz Urien Co-authored-by: simon-id --- docs/test.ts | 4 + index.d.ts | 16 +++ package.json | 2 +- packages/dd-trace/src/appsec/addresses.js | 3 +- packages/dd-trace/src/appsec/index.js | 14 +- packages/dd-trace/src/appsec/reporter.js | 21 +++ .../src/appsec/waf/waf_context_wrapper.js | 9 +- .../dd-trace/src/appsec/waf/waf_manager.js | 1 - packages/dd-trace/src/config.js | 17 ++- .../test/appsec/api_security_rules.json | 108 +++++++++++++++ .../test/appsec/index.express.plugin.spec.js | 106 +++++++++++---- packages/dd-trace/test/appsec/index.spec.js | 123 +++++++++++++++++- .../dd-trace/test/appsec/reporter.spec.js | 36 +++++ .../dd-trace/test/appsec/waf/index.spec.js | 52 ++++---- .../appsec/waf/waf_context_wrapper.spec.js | 6 +- packages/dd-trace/test/config.spec.js | 55 ++++++++ yarn.lock | 8 +- 17 files changed, 511 insertions(+), 70 deletions(-) create mode 100644 packages/dd-trace/test/appsec/api_security_rules.json diff --git a/docs/test.ts b/docs/test.ts index 56532363b7a..dab49394a81 100644 --- a/docs/test.ts +++ b/docs/test.ts @@ -110,6 +110,10 @@ tracer.init({ blockedTemplateJson: './blocked.json', eventTracking: { mode: 'safe' + }, + apiSecurity: { + enabled: true, + requestSampling: 1.0 } } }); diff --git a/index.d.ts b/index.d.ts index 75c03519f38..0a7c859f51d 100644 --- a/index.d.ts +++ b/index.d.ts @@ -578,6 +578,22 @@ export declare interface TracerOptions { * @default 'safe' */ mode?: 'safe' | 'extended' | 'disabled' + }, + + /** + * Configuration for Api Security sampling + */ + apiSecurity?: { + /** Whether to enable Api Security. + * @default false + */ + enabled?: boolean, + + /** Controls the request sampling rate (between 0 and 1) in which Api Security is triggered. + * The value will be coerced back if it's outside of the 0-1 range. 
+ * @default 0.1 + */ + requestSampling?: number } }; diff --git a/package.json b/package.json index 796ab6b5c8f..82940ca367b 100644 --- a/package.json +++ b/package.json @@ -68,7 +68,7 @@ "node": ">=16" }, "dependencies": { - "@datadog/native-appsec": "4.0.0", + "@datadog/native-appsec": "5.0.0", "@datadog/native-iast-rewriter": "2.2.1", "@datadog/native-iast-taint-tracking": "1.6.4", "@datadog/native-metrics": "^2.0.0", diff --git a/packages/dd-trace/src/appsec/addresses.js b/packages/dd-trace/src/appsec/addresses.js index 32c5fc0bee4..a4d47243a67 100644 --- a/packages/dd-trace/src/appsec/addresses.js +++ b/packages/dd-trace/src/appsec/addresses.js @@ -16,5 +16,6 @@ module.exports = { HTTP_CLIENT_IP: 'http.client_ip', - USER_ID: 'usr.id' + USER_ID: 'usr.id', + WAF_CONTEXT_PROCESSOR: 'waf.context.processor' } diff --git a/packages/dd-trace/src/appsec/index.js b/packages/dd-trace/src/appsec/index.js index a6ecc62902b..386918636cc 100644 --- a/packages/dd-trace/src/appsec/index.js +++ b/packages/dd-trace/src/appsec/index.js @@ -28,6 +28,14 @@ const { storage } = require('../../../datadog-core') let isEnabled = false let config +function sampleRequest ({ enabled, requestSampling }) { + if (!enabled || !requestSampling) { + return false + } + + return Math.random() <= requestSampling +} + function enable (_config) { if (isEnabled) return @@ -90,6 +98,10 @@ function incomingHttpStartTranslator ({ req, res, abortController }) { payload[addresses.HTTP_CLIENT_IP] = clientIp } + if (sampleRequest(config.appsec.apiSecurity)) { + payload[addresses.WAF_CONTEXT_PROCESSOR] = { 'extract-schema': true } + } + const actions = waf.run(payload, req) handleResults(actions, req, res, rootSpan, abortController) @@ -101,7 +113,7 @@ function incomingHttpEndTranslator ({ req, res }) { delete responseHeaders['set-cookie'] const payload = { - [addresses.HTTP_INCOMING_RESPONSE_CODE]: res.statusCode, + [addresses.HTTP_INCOMING_RESPONSE_CODE]: '' + res.statusCode, [addresses.HTTP_INCOMING_RESPONSE_HEADERS]: responseHeaders } diff --git a/packages/dd-trace/src/appsec/reporter.js b/packages/dd-trace/src/appsec/reporter.js index 5fbfb92cfb3..d22613c749a 100644 --- a/packages/dd-trace/src/appsec/reporter.js +++ b/packages/dd-trace/src/appsec/reporter.js @@ -10,6 +10,7 @@ const { incrementWafUpdatesMetric, incrementWafRequestsMetric } = require('./telemetry') +const zlib = require('zlib') // default limiter, configurable with setRateLimit() let limiter = new Limiter(100) @@ -140,6 +141,25 @@ function reportAttack (attackData) { rootSpan.addTags(newTags) } +function reportSchemas (derivatives) { + if (!derivatives) return + + const req = storage.getStore()?.req + const rootSpan = web.root(req) + + if (!rootSpan) return + + const tags = {} + for (const [address, value] of Object.entries(derivatives)) { + if (address.startsWith('_dd.appsec.s.req')) { + const gzippedValue = zlib.gzipSync(JSON.stringify(value)) + tags[address] = gzippedValue.toString('base64') + } + } + + rootSpan.addTags(tags) +} + function finishRequest (req, res) { const rootSpan = web.root(req) if (!rootSpan) return @@ -175,6 +195,7 @@ module.exports = { reportMetrics, reportAttack, reportWafUpdate: incrementWafUpdatesMetric, + reportSchemas, finishRequest, setRateLimit, mapHeaderAndTags diff --git a/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js b/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js index 801949b8563..83ab3dcc1cd 100644 --- a/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js +++ 
b/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js @@ -10,9 +10,8 @@ const preventDuplicateAddresses = new Set([ ]) class WAFContextWrapper { - constructor (ddwafContext, requiredAddresses, wafTimeout, wafVersion, rulesVersion) { + constructor (ddwafContext, wafTimeout, wafVersion, rulesVersion) { this.ddwafContext = ddwafContext - this.requiredAddresses = requiredAddresses this.wafTimeout = wafTimeout this.wafVersion = wafVersion this.rulesVersion = rulesVersion @@ -26,7 +25,9 @@ class WAFContextWrapper { // TODO: possible optimizaion: only send params that haven't already been sent with same value to this wafContext for (const key of Object.keys(params)) { - if (this.requiredAddresses.has(key) && !this.addressesToSkip.has(key)) { + // TODO: requiredAddresses is no longer used due to processor addresses are not included in the list. Check on + // future versions when the actual addresses are included in the 'loaded' section inside diagnostics. + if (!this.addressesToSkip.has(key)) { inputs[key] = params[key] if (preventDuplicateAddresses.has(key)) { newAddressesToSkip.add(key) @@ -63,6 +64,8 @@ class WAFContextWrapper { Reporter.reportAttack(JSON.stringify(result.events)) } + Reporter.reportSchemas(result.derivatives) + return result.actions } catch (err) { log.error('Error while running the AppSec WAF') diff --git a/packages/dd-trace/src/appsec/waf/waf_manager.js b/packages/dd-trace/src/appsec/waf/waf_manager.js index ee077be17bd..deac04f80ed 100644 --- a/packages/dd-trace/src/appsec/waf/waf_manager.js +++ b/packages/dd-trace/src/appsec/waf/waf_manager.js @@ -37,7 +37,6 @@ class WAFManager { if (!wafContext) { wafContext = new WAFContextWrapper( this.ddwaf.createContext(), - this.ddwaf.requiredAddresses, this.wafTimeout, this.ddwafVersion, this.rulesVersion diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index 2ce3a8f3bf5..3cc35ecb6aa 100644 --- a/packages/dd-trace/src/config.js +++ b/packages/dd-trace/src/config.js @@ -399,7 +399,6 @@ class Config { appsec.enabled, process.env.DD_APPSEC_ENABLED && isTrue(process.env.DD_APPSEC_ENABLED) ) - const DD_APPSEC_RULES = coalesce( appsec.rules, process.env.DD_APPSEC_RULES @@ -441,6 +440,16 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) process.env.DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING, 'safe' ).toLowerCase() + const DD_EXPERIMENTAL_API_SECURITY_ENABLED = coalesce( + appsec?.apiSecurity?.enabled, + isTrue(process.env.DD_EXPERIMENTAL_API_SECURITY_ENABLED), + false + ) + const DD_API_SECURITY_REQUEST_SAMPLE_RATE = coalesce( + appsec?.apiSecurity?.requestSampling, + parseFloat(process.env.DD_API_SECURITY_REQUEST_SAMPLE_RATE), + 0.1 + ) const remoteConfigOptions = options.remoteConfig || {} const DD_REMOTE_CONFIGURATION_ENABLED = coalesce( @@ -626,8 +635,14 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) 
eventTracking: { enabled: ['extended', 'safe'].includes(DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING), mode: DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING + }, + apiSecurity: { + enabled: DD_EXPERIMENTAL_API_SECURITY_ENABLED, + // Coerce value between 0 and 1 + requestSampling: Math.min(1, Math.max(0, DD_API_SECURITY_REQUEST_SAMPLE_RATE)) } } + this.remoteConfig = { enabled: DD_REMOTE_CONFIGURATION_ENABLED, pollInterval: DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS diff --git a/packages/dd-trace/test/appsec/api_security_rules.json b/packages/dd-trace/test/appsec/api_security_rules.json new file mode 100644 index 00000000000..fad50fcd358 --- /dev/null +++ b/packages/dd-trace/test/appsec/api_security_rules.json @@ -0,0 +1,108 @@ +{ + "version": "2.2", + "rules": [ + { + "id": "tst-000-001-", + "name": "useless rule to avoid empty rule list", + "tags": { + "type": "lfi", + "crs_id": "000001", + "category": "attack_attempt" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "server.request.body" + } + ], + "regex": "^.*attack*" + }, + "operator": "match_regex" + } + ], + "transformers": [], + "on_match": [ + "block" + ] + } + ], + "processors": [ + { + "id": "preprocessor-001", + "generator": "extract_schema", + "conditions": [ + { + "operator": "equals", + "parameters": { + "inputs": [ + { + "address": "waf.context.processor", + "key_path": [ + "extract-schema" + ] + } + ], + "value": true, + "type": "boolean" + } + } + ], + "parameters": { + "mappings": [ + { + "inputs": [ + { + "address": "server.request.body" + } + ], + "output": "_dd.appsec.s.req.body" + }, + { + "inputs": [ + { + "address": "server.request.headers.no_cookies" + } + ], + "output": "_dd.appsec.s.req.headers" + }, + { + "inputs": [ + { + "address": "server.request.query" + } + ], + "output": "_dd.appsec.s.req.query" + }, + { + "inputs": [ + { + "address": "server.request.path_params" + } + ], + "output": "_dd.appsec.s.req.params" + }, + { + "inputs": [ + { + "address": "server.response.headers.no_cookies" + } + ], + "output": "_dd.appsec.s.res.headers" + }, + { + "inputs": [ + { + "address": "http.response.body" + } + ], + "output": "_dd.appsec.s.res.body" + } + ] + }, + "evaluate": true, + "output": true + } + ] +} diff --git a/packages/dd-trace/test/appsec/index.express.plugin.spec.js b/packages/dd-trace/test/appsec/index.express.plugin.spec.js index 4f81945c0e7..9b1d2ea52f8 100644 --- a/packages/dd-trace/test/appsec/index.express.plugin.spec.js +++ b/packages/dd-trace/test/appsec/index.express.plugin.spec.js @@ -7,6 +7,7 @@ const agent = require('../plugins/agent') const appsec = require('../../src/appsec') const Config = require('../../src/config') const { json } = require('../../src/appsec/blocked_templates') +const zlib = require('zlib') withVersions('express', 'express', version => { describe('Suspicious request blocking - query', () => { @@ -18,13 +19,20 @@ withVersions('express', 'express', version => { before((done) => { const express = require('../../../../versions/express').get() + const bodyParser = require('../../../../versions/body-parser').get() const app = express() + app.use(bodyParser.json()) + app.get('/', (req, res) => { requestBody() res.end('DONE') }) + app.post('/', (req, res) => { + res.end('DONE') + }) + getPort().then(newPort => { port = newPort server = app.listen(port, () => { @@ -33,37 +41,91 @@ withVersions('express', 'express', version => { }) }) - beforeEach(async () => { - requestBody = sinon.stub() - appsec.enable(new Config({ appsec: { enabled: true, rules: path.join(__dirname, 
'express-rules.json') } })) - }) - - afterEach(() => { - appsec.disable() - }) - after(() => { server.close() return agent.close({ ritmReset: false }) }) - it('should not block the request without an attack', async () => { - const res = await axios.get(`http://localhost:${port}/?key=value`) + describe('Blocking', () => { + beforeEach(async () => { + requestBody = sinon.stub() + appsec.enable(new Config({ appsec: { enabled: true, rules: path.join(__dirname, 'express-rules.json') } })) + }) + + afterEach(() => { + appsec.disable() + }) + + it('should not block the request without an attack', async () => { + const res = await axios.get(`http://localhost:${port}/?key=value`) - expect(requestBody).to.be.calledOnce - expect(res.data).to.be.equal('DONE') + expect(requestBody).to.be.calledOnce + expect(res.data).to.be.equal('DONE') + }) + + it('should block the request when attack is detected', async () => { + try { + await axios.get(`http://localhost:${port}/?key=testattack`) + + return Promise.reject(new Error('Request should not return 200')) + } catch (e) { + expect(e.response.status).to.be.equals(403) + expect(e.response.data).to.be.deep.equal(JSON.parse(json)) + expect(requestBody).not.to.be.called + } + }) }) - it('should block the request when attack is detected', async () => { - try { - await axios.get(`http://localhost:${port}/?key=testattack`) + describe('Api Security', () => { + let config - return Promise.reject(new Error('Request should not return 200')) - } catch (e) { - expect(e.response.status).to.be.equals(403) - expect(e.response.data).to.be.deep.equal(JSON.parse(json)) - expect(requestBody).not.to.be.called - } + beforeEach(() => { + config = new Config({ + appsec: { + enabled: true, + rules: path.join(__dirname, 'api_security_rules.json'), + apiSecurity: { + enabled: true, + requestSampling: 1.0 + } + } + }) + }) + + afterEach(() => { + appsec.disable() + }) + + it('should get the schema', async () => { + appsec.enable(config) + + const expectedSchema = zlib.gzipSync(JSON.stringify([{ 'key': [8] }])).toString('base64') + const res = await axios.post(`http://localhost:${port}/`, { key: 'value' }) + + await agent.use((traces) => { + const span = traces[0][0] + expect(span.meta).to.haveOwnProperty('_dd.appsec.s.req.body') + expect(span.meta['_dd.appsec.s.req.body']).to.be.equal(expectedSchema) + }) + + expect(res.status).to.be.equal(200) + expect(res.data).to.be.equal('DONE') + }) + + it('should not get the schema', async () => { + config.appsec.apiSecurity.requestSampling = 0 + appsec.enable(config) + + const res = await axios.post(`http://localhost:${port}/`, { key: 'value' }) + + await agent.use((traces) => { + const span = traces[0][0] + expect(span.meta).not.to.haveOwnProperty('_dd.appsec.s.req.body') + }) + + expect(res.status).to.be.equal(200) + expect(res.data).to.be.equal('DONE') + }) }) }) }) diff --git a/packages/dd-trace/test/appsec/index.spec.js b/packages/dd-trace/test/appsec/index.spec.js index 7cc4dc070e9..14306f8203a 100644 --- a/packages/dd-trace/test/appsec/index.spec.js +++ b/packages/dd-trace/test/appsec/index.spec.js @@ -49,6 +49,10 @@ describe('AppSec Index', () => { eventTracking: { enabled: true, mode: 'safe' + }, + apiSecurity: { + enabled: false, + requestSampling: 0 } } } @@ -266,6 +270,7 @@ describe('AppSec Index', () => { const rootSpan = { addTags: sinon.stub() } + web.root.returns(rootSpan) }) @@ -298,7 +303,7 @@ describe('AppSec Index', () => { AppSec.incomingHttpEndTranslator({ req, res }) expect(waf.run).to.have.been.calledOnceWithExactly({ - 
'server.response.status': 201, + 'server.response.status': '201', 'server.response.headers.no_cookies': { 'content-type': 'application/json', 'content-lenght': 42 } }, req) @@ -339,7 +344,7 @@ describe('AppSec Index', () => { AppSec.incomingHttpEndTranslator({ req, res }) expect(waf.run).to.have.been.calledOnceWithExactly({ - 'server.response.status': 201, + 'server.response.status': '201', 'server.response.headers.no_cookies': { 'content-type': 'application/json', 'content-lenght': 42 } }, req) @@ -390,7 +395,7 @@ describe('AppSec Index', () => { AppSec.incomingHttpEndTranslator({ req, res }) expect(waf.run).to.have.been.calledOnceWithExactly({ - 'server.response.status': 201, + 'server.response.status': '201', 'server.response.headers.no_cookies': { 'content-type': 'application/json', 'content-lenght': 42 }, 'server.request.body': { a: '1' }, 'server.request.path_params': { c: '3' }, @@ -401,6 +406,118 @@ describe('AppSec Index', () => { }) }) + describe('Api Security', () => { + beforeEach(() => { + sinon.stub(waf, 'run') + + const rootSpan = { + addTags: sinon.stub() + } + + web.root.returns(rootSpan) + }) + + it('should not trigger schema extraction with sampling disabled', () => { + config.appsec.apiSecurity = { + enabled: true, + requestSampling: 0 + } + + AppSec.enable(config) + + const req = { + url: '/path', + headers: { + 'user-agent': 'Arachni', + 'host': 'localhost', + cookie: 'a=1;b=2' + }, + method: 'POST', + socket: { + remoteAddress: '127.0.0.1', + remotePort: 8080 + } + } + const res = {} + + AppSec.incomingHttpStartTranslator({ req, res }) + + expect(waf.run).to.have.been.calledOnceWithExactly({ + 'server.request.uri.raw': '/path', + 'server.request.headers.no_cookies': { 'user-agent': 'Arachni', host: 'localhost' }, + 'server.request.method': 'POST', + 'http.client_ip': '127.0.0.1' + }, req) + }) + + it('should not trigger schema extraction with feature disabled', () => { + config.appsec.apiSecurity = { + enabled: false, + requestSampling: 1 + } + + AppSec.enable(config) + + const req = { + url: '/path', + headers: { + 'user-agent': 'Arachni', + 'host': 'localhost', + cookie: 'a=1;b=2' + }, + method: 'POST', + socket: { + remoteAddress: '127.0.0.1', + remotePort: 8080 + } + } + const res = {} + + AppSec.incomingHttpStartTranslator({ req, res }) + + expect(waf.run).to.have.been.calledOnceWithExactly({ + 'server.request.uri.raw': '/path', + 'server.request.headers.no_cookies': { 'user-agent': 'Arachni', host: 'localhost' }, + 'server.request.method': 'POST', + 'http.client_ip': '127.0.0.1' + }, req) + }) + + it('should trigger schema extraction with sampling enabled', () => { + config.appsec.apiSecurity = { + enabled: true, + requestSampling: 1 + } + + AppSec.enable(config) + + const req = { + url: '/path', + headers: { + 'user-agent': 'Arachni', + 'host': 'localhost', + cookie: 'a=1;b=2' + }, + method: 'POST', + socket: { + remoteAddress: '127.0.0.1', + remotePort: 8080 + } + } + const res = {} + + AppSec.incomingHttpStartTranslator({ req, res }) + + expect(waf.run).to.have.been.calledOnceWithExactly({ + 'server.request.uri.raw': '/path', + 'server.request.headers.no_cookies': { 'user-agent': 'Arachni', host: 'localhost' }, + 'server.request.method': 'POST', + 'http.client_ip': '127.0.0.1', + 'waf.context.processor': { 'extract-schema': true } + }, req) + }) + }) + describe('Channel handlers', () => { let abortController, req, res, rootSpan diff --git a/packages/dd-trace/test/appsec/reporter.spec.js b/packages/dd-trace/test/appsec/reporter.spec.js index 
ba96c885d7f..36f1e1b5276 100644 --- a/packages/dd-trace/test/appsec/reporter.spec.js +++ b/packages/dd-trace/test/appsec/reporter.spec.js @@ -2,6 +2,7 @@ const proxyquire = require('proxyquire') const { storage } = require('../../../datadog-core') +const zlib = require('zlib') describe('reporter', () => { let Reporter @@ -280,6 +281,41 @@ describe('reporter', () => { }) }) + describe('reportSchemas', () => { + it('should not call addTags if parameter is undefined', () => { + Reporter.reportSchemas(undefined) + expect(span.addTags).not.to.be.called + }) + + it('should call addTags with an empty array', () => { + Reporter.reportSchemas([]) + expect(span.addTags).to.be.calledOnceWithExactly({}) + }) + + it('should call addTags with matched tags', () => { + const schemaValue = [{ 'key': [8] }] + const derivatives = { + '_dd.appsec.s.req.headers': schemaValue, + '_dd.appsec.s.req.query': schemaValue, + '_dd.appsec.s.req.params': schemaValue, + '_dd.appsec.s.req.cookies': schemaValue, + '_dd.appsec.s.req.body': schemaValue, + 'custom.processor.output': schemaValue + } + + Reporter.reportSchemas(derivatives) + + const schemaEncoded = zlib.gzipSync(JSON.stringify(schemaValue)).toString('base64') + expect(span.addTags).to.be.calledOnceWithExactly({ + '_dd.appsec.s.req.headers': schemaEncoded, + '_dd.appsec.s.req.query': schemaEncoded, + '_dd.appsec.s.req.params': schemaEncoded, + '_dd.appsec.s.req.cookies': schemaEncoded, + '_dd.appsec.s.req.body': schemaEncoded + }) + }) + }) + describe('finishRequest', () => { let wafContext diff --git a/packages/dd-trace/test/appsec/waf/index.spec.js b/packages/dd-trace/test/appsec/waf/index.spec.js index 49ea3654a07..0c01a8ad788 100644 --- a/packages/dd-trace/test/appsec/waf/index.spec.js +++ b/packages/dd-trace/test/appsec/waf/index.spec.js @@ -26,10 +26,6 @@ describe('WAF Manager', () => { loaded: ['rule_1'], failed: [] } } - DDWAF.prototype.requiredAddresses = new Map([ - ['server.request.headers.no_cookies', { 'header': 'value' }], - ['server.request.uri.raw', 'https://testurl'] - ]) WAFManager = proxyquire('../../../src/appsec/waf/waf_manager', { '@datadog/native-appsec': { DDWAF } @@ -43,6 +39,7 @@ describe('WAF Manager', () => { sinon.stub(Reporter, 'reportMetrics') sinon.stub(Reporter, 'reportAttack') sinon.stub(Reporter, 'reportWafUpdate') + sinon.stub(Reporter, 'reportSchemas') webContext = {} sinon.stub(web, 'getContext').returns(webContext) @@ -213,39 +210,19 @@ describe('WAF Manager', () => { expect(ddwafContext.run).not.to.be.called }) - it('should not call ddwafContext.run with invalid params', () => { - waf.run({ - 'invalid_address': 'value' - }, req) - expect(ddwafContext.run).not.to.be.called - }) - it('should call ddwafContext.run with params', () => { ddwafContext.run.returns({ totalRuntime: 1, durationExt: 1 }) wafContextWrapper.run({ 'server.request.headers.no_cookies': { 'header': 'value' }, - 'server.request.uri.raw': 'https://testurl' + 'server.request.uri.raw': 'https://testurl', + 'processor.address': { 'extract-schema': true } }) expect(ddwafContext.run).to.be.calledOnceWithExactly({ 'server.request.headers.no_cookies': { 'header': 'value' }, - 'server.request.uri.raw': 'https://testurl' - }, config.appsec.wafTimeout) - }) - - it('should call ddwafContext.run with filtered params', () => { - ddwafContext.run.returns({ totalRuntime: 1, durationExt: 1 }) - - wafContextWrapper.run({ - 'server.request.headers.no_cookies': { 'header2': 'value2' }, - 'invalidaddress': 'invalid-value', - 'server.request.uri.raw': 'https://othertesturl' - }) 
- - expect(ddwafContext.run).to.be.calledOnceWithExactly({ - 'server.request.headers.no_cookies': { 'header2': 'value2' }, - 'server.request.uri.raw': 'https://othertesturl' + 'server.request.uri.raw': 'https://testurl', + 'processor.address': { 'extract-schema': true } }, config.appsec.wafTimeout) }) @@ -320,6 +297,25 @@ describe('WAF Manager', () => { expect(result).to.be.equals(actions) }) + + it('should report schemas when ddwafContext returns schemas in the derivatives', () => { + const result = { + totalRuntime: 1, + durationExt: 1, + derivatives: [{ '_dd.appsec.s.req.body': [8] }] + } + const params = { + 'server.request.body': 'value', + 'waf.context.processor': { + 'extract-schema': true + } + } + + ddwafContext.run.returns(result) + + wafContextWrapper.run(params) + expect(Reporter.reportSchemas).to.be.calledOnceWithExactly(result.derivatives) + }) }) }) }) diff --git a/packages/dd-trace/test/appsec/waf/waf_context_wrapper.spec.js b/packages/dd-trace/test/appsec/waf/waf_context_wrapper.spec.js index d99300515a7..23df1adfdc5 100644 --- a/packages/dd-trace/test/appsec/waf/waf_context_wrapper.spec.js +++ b/packages/dd-trace/test/appsec/waf/waf_context_wrapper.spec.js @@ -5,14 +5,10 @@ const addresses = require('../../../src/appsec/addresses') describe('WAFContextWrapper', () => { it('Should send HTTP_INCOMING_QUERY only once', () => { - const requiredAddresses = new Set([ - addresses.HTTP_INCOMING_QUERY - ]) const ddwafContext = { run: sinon.stub() } - const wafContextWrapper = new WAFContextWrapper(ddwafContext, requiredAddresses, - 1000, '1.14.0', '1.8.0') + const wafContextWrapper = new WAFContextWrapper(ddwafContext, 1000, '1.14.0', '1.8.0') const payload = { [addresses.HTTP_INCOMING_QUERY]: { key: 'value' } diff --git a/packages/dd-trace/test/config.spec.js b/packages/dd-trace/test/config.spec.js index eeffd13c09e..3459c50e260 100644 --- a/packages/dd-trace/test/config.spec.js +++ b/packages/dd-trace/test/config.spec.js @@ -109,6 +109,8 @@ describe('Config', () => { expect(config).to.have.nested.property('appsec.blockedTemplateJson', undefined) expect(config).to.have.nested.property('appsec.eventTracking.enabled', true) expect(config).to.have.nested.property('appsec.eventTracking.mode', 'safe') + expect(config).to.have.nested.property('appsec.apiSecurity.enabled', false) + expect(config).to.have.nested.property('appsec.apiSecurity.requestSampling', 0.1) expect(config).to.have.nested.property('remoteConfig.enabled', true) expect(config).to.have.nested.property('remoteConfig.pollInterval', 5) expect(config).to.have.nested.property('iast.enabled', false) @@ -220,6 +222,9 @@ describe('Config', () => { process.env.DD_IAST_TELEMETRY_VERBOSITY = 'DEBUG' process.env.DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED = 'true' process.env.DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED = 'true' + process.env.DD_EXPERIMENTAL_PROFILING_ENABLED = 'true' + process.env.DD_EXPERIMENTAL_API_SECURITY_ENABLED = 'true' + process.env.DD_API_SECURITY_REQUEST_SAMPLE_RATE = 1 const config = new Config() @@ -285,6 +290,8 @@ describe('Config', () => { expect(config).to.have.nested.property('appsec.blockedTemplateJson', BLOCKED_TEMPLATE_JSON) expect(config).to.have.nested.property('appsec.eventTracking.enabled', true) expect(config).to.have.nested.property('appsec.eventTracking.mode', 'extended') + expect(config).to.have.nested.property('appsec.apiSecurity.enabled', true) + expect(config).to.have.nested.property('appsec.apiSecurity.requestSampling', 1) expect(config).to.have.nested.property('remoteConfig.enabled', 
false) expect(config).to.have.nested.property('remoteConfig.pollInterval', 42) expect(config).to.have.nested.property('iast.enabled', true) @@ -654,6 +661,8 @@ describe('Config', () => { process.env.DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML = BLOCKED_TEMPLATE_JSON // note the inversion between process.env.DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON = BLOCKED_TEMPLATE_HTML // json and html here process.env.DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING = 'disabled' + process.env.DD_EXPERIMENTAL_API_SECURITY_ENABLED = 'false' + process.env.DD_API_SECURITY_REQUEST_SAMPLE_RATE = 0.5 process.env.DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS = 11 process.env.DD_IAST_ENABLED = 'false' process.env.DD_IAST_REDACTION_NAME_PATTERN = 'name_pattern_to_be_overriden_by_options' @@ -717,6 +726,10 @@ describe('Config', () => { blockedTemplateJson: BLOCKED_TEMPLATE_JSON_PATH, eventTracking: { mode: 'safe' + }, + apiSecurity: { + enabled: true, + requestSampling: 1.0 } }, remoteConfig: { @@ -766,6 +779,8 @@ describe('Config', () => { expect(config).to.have.nested.property('appsec.blockedTemplateJson', BLOCKED_TEMPLATE_JSON) expect(config).to.have.nested.property('appsec.eventTracking.enabled', true) expect(config).to.have.nested.property('appsec.eventTracking.mode', 'safe') + expect(config).to.have.nested.property('appsec.apiSecurity.enabled', true) + expect(config).to.have.nested.property('appsec.apiSecurity.requestSampling', 1.0) expect(config).to.have.nested.property('remoteConfig.pollInterval', 42) expect(config).to.have.nested.property('iast.enabled', true) expect(config).to.have.nested.property('iast.requestSampling', 30) @@ -790,6 +805,10 @@ describe('Config', () => { blockedTemplateJson: undefined, eventTracking: { mode: 'disabled' + }, + apiSecurity: { + enabled: true, + requestSampling: 1.0 } }, experimental: { @@ -804,6 +823,10 @@ describe('Config', () => { blockedTemplateJson: BLOCKED_TEMPLATE_JSON_PATH, eventTracking: { mode: 'safe' + }, + apiSecurity: { + enabled: false, + requestSampling: 0.5 } } } @@ -822,6 +845,10 @@ describe('Config', () => { eventTracking: { enabled: false, mode: 'disabled' + }, + apiSecurity: { + enabled: true, + requestSampling: 1.0 } }) }) @@ -1331,4 +1358,32 @@ describe('Config', () => { expect(config).not.to.have.property('repositoryUrl') }) }) + it('should sanitize values for API Security sampling between 0 and 1', () => { + expect(new Config({ + appsec: { + apiSecurity: { + enabled: true, + requestSampling: 5 + } + } + })).to.have.nested.property('appsec.apiSecurity.requestSampling', 1) + + expect(new Config({ + appsec: { + apiSecurity: { + enabled: true, + requestSampling: -5 + } + } + })).to.have.nested.property('appsec.apiSecurity.requestSampling', 0) + + expect(new Config({ + appsec: { + apiSecurity: { + enabled: true, + requestSampling: 0.1 + } + } + })).to.have.nested.property('appsec.apiSecurity.requestSampling', 0.1) + }) }) diff --git a/yarn.lock b/yarn.lock index fb60fd791b5..d4b1dd55c2f 100644 --- a/yarn.lock +++ b/yarn.lock @@ -385,10 +385,10 @@ resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.5.0.tgz#bb504579c1cae923e6576a4f5da43d25f97bdbd9" integrity sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ== -"@datadog/native-appsec@4.0.0": - version "4.0.0" - resolved "https://registry.yarnpkg.com/@datadog/native-appsec/-/native-appsec-4.0.0.tgz#ee08138b987dec557eac3650a43a972dac85b6a6" - integrity sha512-myTguXJ3VQHS2E1ylNsSF1avNpDmq5t+K4Q47wdzeakGc3sDIDDyEbvuFTujl9c9wBIkup94O1mZj5DR37ajzA== 
+"@datadog/native-appsec@5.0.0": + version "5.0.0" + resolved "https://registry.yarnpkg.com/@datadog/native-appsec/-/native-appsec-5.0.0.tgz#e42e77f42062532ad7defa3a79090dc8b020c22b" + integrity sha512-Ks8a4L49N40w+TJjj2e9ncGssUIEjo4wnmUFjPBRvlLGuVj1VJLxCx7ztpd8eTycM5QQlzggCDOP6CMEVmeZbA== dependencies: node-gyp-build "^3.9.0" From f1140def0a475038651839ba98d45586311fdb33 Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Fri, 24 Nov 2023 18:22:06 +0100 Subject: [PATCH 084/147] Expose a promise for profiler start (#3817) This makes tests that depend on it having been started less brittle as they don't need to sleep for a guessed duration. --- integration-tests/profiler/codehotspots.js | 2 +- packages/dd-trace/src/noop/proxy.js | 4 ++++ packages/dd-trace/src/profiler.js | 2 +- packages/dd-trace/src/profiling/profiler.js | 14 ++++++++++---- packages/dd-trace/src/proxy.js | 12 +++++++++++- 5 files changed, 27 insertions(+), 7 deletions(-) diff --git a/integration-tests/profiler/codehotspots.js b/integration-tests/profiler/codehotspots.js index c72342a7adb..9cffc768185 100644 --- a/integration-tests/profiler/codehotspots.js +++ b/integration-tests/profiler/codehotspots.js @@ -39,4 +39,4 @@ function runBusySpans () { }) } -setTimeout(runBusySpans, 100) +tracer.profilerStarted().then(runBusySpans) diff --git a/packages/dd-trace/src/noop/proxy.js b/packages/dd-trace/src/noop/proxy.js index daf077af8ff..db6e39392c9 100644 --- a/packages/dd-trace/src/noop/proxy.js +++ b/packages/dd-trace/src/noop/proxy.js @@ -20,6 +20,10 @@ class Tracer { return this } + profilerStarted () { + return Promise.resolve(false) + } + trace (name, options, fn) { if (!fn) { fn = options diff --git a/packages/dd-trace/src/profiler.js b/packages/dd-trace/src/profiler.js index e140ce0cf3b..9d42d4a6f6f 100644 --- a/packages/dd-trace/src/profiler.js +++ b/packages/dd-trace/src/profiler.js @@ -17,7 +17,7 @@ module.exports = { error: (message) => log.error(message) } - profiler.start({ + return profiler.start({ enabled, service, version, diff --git a/packages/dd-trace/src/profiling/profiler.js b/packages/dd-trace/src/profiling/profiler.js index c72fa3b6fba..4e5882189a9 100644 --- a/packages/dd-trace/src/profiling/profiler.js +++ b/packages/dd-trace/src/profiling/profiler.js @@ -23,15 +23,19 @@ class Profiler extends EventEmitter { } start (options) { - this._start(options).catch((err) => { if (options.logger) options.logger.error(err) }) - return this + return this._start(options).catch((err) => { + if (options.logger) { + options.logger.error(err) + } + return false + }) } async _start (options) { - if (this._enabled) return + if (this._enabled) return true const config = this._config = new Config(options) - if (!config.enabled) return + if (!config.enabled) return false this._logger = config.logger this._enabled = true @@ -67,9 +71,11 @@ class Profiler extends EventEmitter { } this._capture(this._timeoutInterval) + return true } catch (e) { this._logger.error(e) this._stop() + return false } } diff --git a/packages/dd-trace/src/proxy.js b/packages/dd-trace/src/proxy.js index a9d5bd037fd..2919ad9483b 100644 --- a/packages/dd-trace/src/proxy.js +++ b/packages/dd-trace/src/proxy.js @@ -62,11 +62,14 @@ class Tracer extends NoopProxy { // do not stop tracer initialization if the profiler fails to be imported try { const profiler = require('./profiler') - profiler.start(config) + this._profilerStarted = profiler.start(config) } catch (e) { log.error(e) } } + if (!this._profilerStarted) { + this._profilerStarted = 
Promise.resolve(false) + } if (config.runtimeMetrics) { runtimeMetrics.start(config) @@ -104,6 +107,13 @@ class Tracer extends NoopProxy { return this } + profilerStarted () { + if (!this._profilerStarted) { + throw new Error('profilerStarted() must be called after init()') + } + return this._profilerStarted + } + use () { this._pluginManager.configurePlugin(...arguments) return this From e80d57bbcaa836e144ebc1959ed14c39d3451479 Mon Sep 17 00:00:00 2001 From: Nicolas Savoire Date: Fri, 24 Nov 2023 18:22:24 +0100 Subject: [PATCH 085/147] Add source code integration tags to profiles (#3821) If `repositoryUrl` and `commitSHA` are set in profiler config, then set `git.repository_url` and `git.commit.sha` tags with these values respectively. --- packages/dd-trace/src/profiler.js | 6 ++++-- packages/dd-trace/src/profiling/config.js | 8 ++++++++ packages/dd-trace/test/profiling/config.spec.js | 12 ++++++++++++ 3 files changed, 24 insertions(+), 2 deletions(-) diff --git a/packages/dd-trace/src/profiler.js b/packages/dd-trace/src/profiler.js index 9d42d4a6f6f..349f0438d7c 100644 --- a/packages/dd-trace/src/profiler.js +++ b/packages/dd-trace/src/profiler.js @@ -8,7 +8,7 @@ process.once('beforeExit', () => { profiler.stop() }) module.exports = { start: config => { - const { service, version, env, url, hostname, port, tags } = config + const { service, version, env, url, hostname, port, tags, repositoryUrl, commitSHA } = config const { enabled, sourceMap, exporters } = config.profiling const logger = { debug: (message) => log.debug(message), @@ -28,7 +28,9 @@ module.exports = { url, hostname, port, - tags + tags, + repositoryUrl, + commitSHA }) }, diff --git a/packages/dd-trace/src/profiling/config.js b/packages/dd-trace/src/profiling/config.js index 2b52d3297d9..a37015e97b7 100644 --- a/packages/dd-trace/src/profiling/config.js +++ b/packages/dd-trace/src/profiling/config.js @@ -11,6 +11,7 @@ const WallProfiler = require('./profilers/wall') const SpaceProfiler = require('./profilers/space') const EventsProfiler = require('./profilers/events') const { oomExportStrategies, snapshotKinds } = require('./constants') +const { GIT_REPOSITORY_URL, GIT_COMMIT_SHA } = require('../plugins/util/tags') const { tagger } = require('./tagger') const { isFalse, isTrue } = require('../util') @@ -72,6 +73,13 @@ class Config { tagger.parse(options.tags), tagger.parse({ env, host, service, version, functionname }) ) + + // Add source code integration tags if available + if (options.repositoryUrl && options.commitSHA) { + this.tags[GIT_REPOSITORY_URL] = options.repositoryUrl + this.tags[GIT_COMMIT_SHA] = options.commitSHA + } + this.logger = ensureLogger(options.logger) const logger = this.logger function logExperimentalVarDeprecation (shortVarName) { diff --git a/packages/dd-trace/test/profiling/config.spec.js b/packages/dd-trace/test/profiling/config.spec.js index 483b078a7f8..70df5cc2296 100644 --- a/packages/dd-trace/test/profiling/config.spec.js +++ b/packages/dd-trace/test/profiling/config.spec.js @@ -262,6 +262,18 @@ describe('config', () => { expect(config.tags).to.include({ env, service, version }) }) + it('should add source code integration tags if git metadata is available', () => { + const DUMMY_GIT_SHA = '13851f2b092e97acebab1b73f6c0e7818e795b50' + const DUMMY_REPOSITORY_URL = 'git@github.com:DataDog/sci_git_example.git' + + const config = new Config({ + repositoryUrl: DUMMY_REPOSITORY_URL, + commitSHA: DUMMY_GIT_SHA + }) + + expect(config.tags).to.include({ 'git.repository_url': DUMMY_REPOSITORY_URL, 
'git.commit.sha': DUMMY_GIT_SHA }) + }) + it('should support IPv6 hostname', () => { const options = { hostname: '::1' From b3bd9adcfa12f727c545bd513e98e8776ab8749d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Mon, 27 Nov 2023 16:49:09 +0100 Subject: [PATCH 086/147] [ci-visibility] Fix debug message for ITR settings (#3825) --- .../intelligent-test-runner/get-itr-configuration.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js index 2c5aade51e7..60abda9294e 100644 --- a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js +++ b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js @@ -74,7 +74,7 @@ function getItrConfiguration ({ let isCodeCoverageEnabled = attributes.code_coverage let isSuitesSkippingEnabled = attributes.tests_skipping - log.debug(() => `Remote settings: ${{ isCodeCoverageEnabled, isSuitesSkippingEnabled }}`) + log.debug(() => `Remote settings: ${JSON.stringify({ isCodeCoverageEnabled, isSuitesSkippingEnabled })}`) if (process.env.DD_CIVISIBILITY_DANGEROUSLY_FORCE_COVERAGE) { isCodeCoverageEnabled = true From 8002f224a7b2983c74d0aeab1ba889c9ea259b63 Mon Sep 17 00:00:00 2001 From: Ayan Khan Date: Tue, 28 Nov 2023 09:14:16 -0500 Subject: [PATCH 087/147] instrument aerospike v4, v5 (#3804) * instrument aerospike v4, v5 --- .github/workflows/plugins.yml | 24 ++ docker-compose.yml | 4 + .../datadog-instrumentations/src/aerospike.js | 47 +++ .../src/helpers/hooks.js | 1 + .../datadog-plugin-aerospike/src/index.js | 113 ++++++ .../test/index.spec.js | 325 ++++++++++++++++++ .../datadog-plugin-aerospike/test/naming.js | 19 + packages/dd-trace/src/plugins/index.js | 1 + .../src/service-naming/schemas/v0/storage.js | 5 + .../src/service-naming/schemas/v1/storage.js | 4 + packages/dd-trace/test/setup/mocha.js | 4 +- scripts/install_plugin_modules.js | 14 +- 12 files changed, 557 insertions(+), 4 deletions(-) create mode 100644 packages/datadog-instrumentations/src/aerospike.js create mode 100644 packages/datadog-plugin-aerospike/src/index.js create mode 100644 packages/datadog-plugin-aerospike/test/index.spec.js create mode 100644 packages/datadog-plugin-aerospike/test/naming.js diff --git a/.github/workflows/plugins.yml b/.github/workflows/plugins.yml index 91765d7b52d..593882fab65 100644 --- a/.github/workflows/plugins.yml +++ b/.github/workflows/plugins.yml @@ -20,6 +20,30 @@ env: jobs: + aerospike: + runs-on: ubuntu-latest + services: + aerospike: + image: aerospike:ce-6.4.0.3 + ports: + - "127.0.0.1:3000-3002:3000-3002" + env: + PLUGINS: aerospike + SERVICES: aerospike + PACKAGE_VERSION_RANGE: '4.0.0 - 5.7.0' + steps: + - uses: actions/checkout@v2 + - uses: ./.github/actions/testagent/start + - uses: ./.github/actions/node/setup + - run: yarn install --ignore-engines + - uses: ./.github/actions/node/oldest + - run: yarn test:plugins:ci + - run: echo "PACKAGE_VERSION_RANGE=>=5.8.0" >> "$GITHUB_ENV" + - uses: ./.github/actions/node/20 # currently the latest version of aerospike only supports node 20 + - run: yarn test:plugins:ci + - if: always() + uses: ./.github/actions/testagent/logs + - uses: codecov/codecov-action@v2 amqp10: # TODO: move rhea to its own job runs-on: ubuntu-latest services: diff --git a/docker-compose.yml b/docker-compose.yml index ec9a519fde6..2ff0e15120a 100644 
--- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,5 +1,9 @@ version: "2" services: + aerospike: + image: aerospike:ce-6.4.0.3 + ports: + - "127.0.0.1:3000-3002:3000-3002" couchbase: image: ghcr.io/datadog/couchbase-server-sandbox:latest ports: diff --git a/packages/datadog-instrumentations/src/aerospike.js b/packages/datadog-instrumentations/src/aerospike.js new file mode 100644 index 00000000000..2d20bdcafa0 --- /dev/null +++ b/packages/datadog-instrumentations/src/aerospike.js @@ -0,0 +1,47 @@ +'use strict' + +const { + addHook +} = require('./helpers/instrument') +const shimmer = require('../../datadog-shimmer') + +const tracingChannel = require('dc-polyfill').tracingChannel +const ch = tracingChannel('apm:aerospike:command') + +function wrapCreateCommand (createCommand) { + if (typeof createCommand !== 'function') return createCommand + + return function commandWithTrace () { + const CommandClass = createCommand.apply(this, arguments) + + if (!CommandClass) return CommandClass + + shimmer.wrap(CommandClass.prototype, 'process', wrapProcess) + + return CommandClass + } +} + +function wrapProcess (process) { + return function (...args) { + const cb = args[0] + if (typeof cb !== 'function') return process.apply(this, args) + + const ctx = { + commandName: this.constructor.name, + commandArgs: this.args, + clientConfig: this.client.config + } + + return ch.traceCallback(process, -1, ctx, this, ...args) + } +} + +addHook({ + name: 'aerospike', + file: 'lib/commands/command.js', + versions: ['4', '5'] +}, +commandFactory => { + return shimmer.wrap(commandFactory, wrapCreateCommand(commandFactory)) +}) diff --git a/packages/datadog-instrumentations/src/helpers/hooks.js b/packages/datadog-instrumentations/src/helpers/hooks.js index de136bce5e7..bd409dcaa01 100644 --- a/packages/datadog-instrumentations/src/helpers/hooks.js +++ b/packages/datadog-instrumentations/src/helpers/hooks.js @@ -20,6 +20,7 @@ module.exports = { '@opentelemetry/sdk-trace-node': () => require('../otel-sdk-trace'), '@redis/client': () => require('../redis'), '@smithy/smithy-client': () => require('../aws-sdk'), + 'aerospike': () => require('../aerospike'), 'amqp10': () => require('../amqp10'), 'amqplib': () => require('../amqplib'), 'aws-sdk': () => require('../aws-sdk'), diff --git a/packages/datadog-plugin-aerospike/src/index.js b/packages/datadog-plugin-aerospike/src/index.js new file mode 100644 index 00000000000..fb4bd6a6d0a --- /dev/null +++ b/packages/datadog-plugin-aerospike/src/index.js @@ -0,0 +1,113 @@ +'use strict' + +const { storage } = require('../../datadog-core') +const DatabasePlugin = require('../../dd-trace/src/plugins/database') + +const AEROSPIKE_PEER_SERVICE = 'aerospike.namespace' + +class AerospikePlugin extends DatabasePlugin { + static get id () { return 'aerospike' } + static get operation () { return 'command' } + static get system () { return 'aerospike' } + static get prefix () { + return 'tracing:apm:aerospike:command' + } + + static get peerServicePrecursors () { + return [AEROSPIKE_PEER_SERVICE] + } + + bindStart (ctx) { + const { commandName, commandArgs } = ctx + const resourceName = commandName.slice(0, commandName.indexOf('Command')) + const store = storage.getStore() + const childOf = store ? 
store.span : null + const meta = getMeta(resourceName, commandArgs) + + const span = this.startSpan(this.operationName(), { + childOf, + service: this.serviceName({ pluginConfig: this.config }), + type: 'aerospike', + kind: 'client', + resource: resourceName, + meta + }, false) + + ctx.parentStore = store + ctx.currentStore = { ...store, span } + + return ctx.currentStore + } + + bindAsyncStart (ctx) { + if (ctx.currentStore) { + // have to manually trigger peer service calculation when using tracing channel + this.tagPeerService(ctx.currentStore.span) + ctx.currentStore.span.finish() + } + return ctx.parentStore + } + + end (ctx) { + if (ctx.result) { + // have to manually trigger peer service calculation when using tracing channel + this.tagPeerService(ctx.currentStore.span) + ctx.currentStore.span.finish() + } + } + + error (ctx) { + if (ctx.error) { + const error = ctx.error + const span = ctx.currentStore.span + span.setTag('error', error) + } + } +} + +function getMeta (resourceName, commandArgs) { + let meta = {} + if (resourceName.includes('Index')) { + const [ns, set, bin, index] = commandArgs + meta = getMetaForIndex(ns, set, bin, index) + } else if (resourceName === 'Query') { + const { ns, set } = commandArgs[2] + meta = getMetaForQuery({ ns, set }) + } else if (isKeyObject(commandArgs[0])) { + const { ns, set, key } = commandArgs[0] + meta = getMetaForKey(ns, set, key) + } + return meta +} + +function getMetaForIndex (ns, set, bin, index) { + return { + [AEROSPIKE_PEER_SERVICE]: ns, + 'aerospike.setname': set, + 'aerospike.bin': bin, + 'aerospike.index': index + } +} + +function getMetaForKey (ns, set, key) { + return { + 'aerospike.key': `${ns}:${set}:${key}`, + [AEROSPIKE_PEER_SERVICE]: ns, + 'aerospike.setname': set, + 'aerospike.userkey': key + } +} + +function getMetaForQuery (queryObj) { + const { ns, set } = queryObj + return { + [AEROSPIKE_PEER_SERVICE]: ns, + 'aerospike.setname': set + } +} + +function isKeyObject (obj) { + return obj && obj.ns !== undefined && obj.set !== undefined && obj.key !== undefined +} + +module.exports = AerospikePlugin diff --git a/packages/datadog-plugin-aerospike/test/index.spec.js b/packages/datadog-plugin-aerospike/test/index.spec.js new file mode 100644 index 00000000000..c1c47dffe83 --- /dev/null +++ b/packages/datadog-plugin-aerospike/test/index.spec.js @@ -0,0 +1,325 @@ +'use strict' + +const agent = require('../../dd-trace/test/plugins/agent') +const semver = require('semver') +const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../../dd-trace/src/constants') +const { expectedSchema, rawExpectedSchema } = require('./naming') +const { NODE_MAJOR } = require('../../../version') + +describe('Plugin', () => { + let aerospike + let config + let tracer + let ns + let set + let userKey + let key + let keyString + + describe('aerospike', () => { + withVersions('aerospike', 'aerospike', version => { + beforeEach(() => { + tracer = require('../../dd-trace') + aerospike = require(`../../../versions/aerospike@${version}`).get() + }) + + beforeEach(() => { + ns = 'test' + set = 'demo' + userKey = 'key' + + config = { + hosts: '127.0.0.1:3000', + port: '3000' + } + key = new aerospike.Key(ns, set, userKey) + keyString = `${ns}:${set}:${userKey}` + }) + + after(() => { + return agent.close({ ritmReset: false }) + }) + + describe('without configuration', () => { + before(() => { + return agent.load('aerospike') + }) + + describe('client', () => { + withPeerService( + () => tracer, + 'aerospike', + () => aerospike.connect(config).then(client 
=> { + return client.put(key, { i: 123 }) + .then(() => client.close()) + }), + 'test', + 'aerospike.namespace' + ) + it('should instrument put', done => { + agent + .use(traces => { + const span = traces[0][0] + expect(span).to.have.property('name', expectedSchema.command.opName) + expect(span).to.have.property('service', expectedSchema.command.serviceName) + expect(span).to.have.property('resource', `Put`) + expect(span).to.have.property('type', 'aerospike') + expect(span.meta).to.have.property('span.kind', 'client') + expect(span.meta).to.have.property('aerospike.key', keyString) + expect(span.meta).to.have.property('aerospike.namespace', ns) + expect(span.meta).to.have.property('aerospike.setname', set) + expect(span.meta).to.have.property('aerospike.userkey', userKey) + expect(span.meta).to.have.property('component', 'aerospike') + }) + .then(done) + .catch(done) + + aerospike.connect(config).then(client => { + return client.put(key, { i: 123 }) + .then(() => client.close()) + }) + }) + + it('should instrument connect', done => { + agent + .use(traces => { + const span = traces[0][0] + expect(span).to.have.property('name', expectedSchema.command.opName) + expect(span).to.have.property('service', expectedSchema.command.serviceName) + expect(span).to.have.property('resource', `Connect`) + expect(span).to.have.property('type', 'aerospike') + expect(span.meta).to.have.property('span.kind', 'client') + expect(span.meta).to.have.property('component', 'aerospike') + }) + .then(done) + .catch(done) + + aerospike.connect(config).then(client => { client.close() }) + }) + + it('should instrument get', done => { + agent + .use(traces => { + const span = traces[0][0] + expect(span).to.have.property('name', expectedSchema.command.opName) + expect(span).to.have.property('service', expectedSchema.command.serviceName) + expect(span).to.have.property('resource', `Get`) + expect(span).to.have.property('type', 'aerospike') + expect(span.meta).to.have.property('span.kind', 'client') + expect(span.meta).to.have.property('aerospike.key', keyString) + expect(span.meta).to.have.property('aerospike.namespace', ns) + expect(span.meta).to.have.property('aerospike.setname', set) + expect(span.meta).to.have.property('aerospike.userkey', userKey) + expect(span.meta).to.have.property('component', 'aerospike') + }) + .then(done) + .catch(done) + + aerospike.connect(config).then(client => { + return client.get(key) + .then(() => client.close()) + }) + }) + + it('should instrument operate', done => { + agent + .use(traces => { + const span = traces[0][0] + expect(span).to.have.property('name', expectedSchema.command.opName) + expect(span).to.have.property('service', expectedSchema.command.serviceName) + expect(span).to.have.property('resource', `Operate`) + expect(span).to.have.property('type', 'aerospike') + expect(span.meta).to.have.property('span.kind', 'client') + expect(span.meta).to.have.property('aerospike.key', keyString) + expect(span.meta).to.have.property('aerospike.namespace', ns) + expect(span.meta).to.have.property('aerospike.setname', set) + expect(span.meta).to.have.property('aerospike.userkey', userKey) + expect(span.meta).to.have.property('component', 'aerospike') + }) + .then(done) + .catch(done) + + aerospike.connect(config).then(client => { + return client.put(key, { i: 123 }) + .then(() => { + const ops = [ + aerospike.operations.incr('i', 1), + aerospike.operations.read('i') + ] + return client.operate(key, ops) + }) + .then(() => client.close()) + }) + }) + + it('should instrument createIndex', 
done => { + agent + .use(traces => { + const span = traces[0][0] + expect(span).to.have.property('name', expectedSchema.command.opName) + expect(span).to.have.property('service', expectedSchema.command.serviceName) + expect(span).to.have.property('resource', `IndexCreate`) + expect(span).to.have.property('type', 'aerospike') + expect(span.meta).to.have.property('span.kind', 'client') + expect(span.meta).to.have.property('aerospike.namespace', ns) + expect(span.meta).to.have.property('aerospike.setname', 'demo') + expect(span.meta).to.have.property('aerospike.bin', 'tags') + expect(span.meta).to.have.property('aerospike.index', 'tags_idx') + expect(span.meta).to.have.property('component', 'aerospike') + }) + .then(done) + .catch(done) + + aerospike.connect(config).then(client => { + const index = { + ns: ns, + set: 'demo', + bin: 'tags', + index: 'tags_idx', + type: aerospike.indexType.LIST, + datatype: aerospike.indexDataType.STRING + } + return client.createIndex(index) + .then(() => client.close()) + }) + }) + + // skip query tests for node 16 and aerospike 4 because of an aerospike error that occurs when using query: + // AerospikeError: Sometimes our doc, or our customers' wishes, get ahead of us. + // We may have processed something that the server is not ready for (unsupported feature). + // this test works on node 14, so it is not a problem with the test but most likely a problem with the package + // version and aerospike server version mismatch which is really hard to pin down, since aerospike doesn't + // provide info on package version's compatibility with each server version + if (!(NODE_MAJOR === 16 && semver.intersects(version, '^4'))) { + it('should instrument query', done => { + agent + .use(traces => { + const span = traces[0][0] + expect(span).to.have.property('name', expectedSchema.command.opName) + expect(span).to.have.property('service', expectedSchema.command.serviceName) + expect(span).to.have.property('resource', `Query`) + expect(span).to.have.property('type', 'aerospike') + expect(span.meta).to.have.property('span.kind', 'client') + expect(span.meta).to.have.property('aerospike.namespace', ns) + expect(span.meta).to.have.property('aerospike.setname', set) + expect(span.meta).to.have.property('component', 'aerospike') + }) + .then(done) + .catch(done) + + aerospike.connect(config).then(client => { + const index = { + ns: ns, + set: 'demo', + bin: 'tags', + index: 'tags_idx', + datatype: aerospike.indexDataType.STRING + } + client.createIndex(index, (error, job) => { + job.waitUntilDone((waitError) => { + const query = client.query(ns, 'demo') + const queryPolicy = { + totalTimeout: 10000 + } + query.select('id', 'tags') + query.where(aerospike.filter.contains('tags', 'green', aerospike.indexType.LIST)) + const stream = query.foreach(queryPolicy) + stream.on('end', () => { client.close() }) + }) + }) + }) + }) + } + it('should run the callback in the parent context', done => { + const obj = {} + aerospike.connect(config).then(client => { + tracer.scope().activate(obj, () => { + client.put(key, { i: 123 }, () => { + expect(tracer.scope().active()).to.equal(obj) + client.close() + done() + }) + }) + }) + }) + + it('should handle errors', done => { + let error + + agent + .use(traces => { + expect(traces[0][0].meta).to.have.property(ERROR_TYPE, error.name) + expect(traces[0][0].meta).to.have.property(ERROR_MESSAGE, error.message) + expect(traces[0][0].meta).to.have.property(ERROR_STACK, error.stack) + expect(traces[0][0].meta).to.have.property('component', 'aerospike') 
+ }) + .then(done) + .catch(done) + + aerospike.connect(config) + .then(client => { + return client.put(key, { i: 'not_a_number' }) + .then(() => { + const ops = [ + aerospike.operations.incr('i', 1), + aerospike.operations.read('i') + ] + + return client.operate(key, ops) + }) + .then(() => client.close()) + }) + .catch(err => { + error = err + }) + }) + withNamingSchema( + () => aerospike.connect(config).then(client => { + return client.put(key, { i: 123 }) + .then(() => client.close()) + }), + rawExpectedSchema.command + ) + }) + }) + + describe('with configuration', () => { + before(() => { + return agent.load('aerospike', { service: 'custom' }) + }) + + it('should be configured with the correct values', done => { + agent + .use(traces => { + expect(traces[0][0]).to.have.property('name', expectedSchema.command.opName) + expect(traces[0][0]).to.have.property('service', 'custom') + }) + .then(done) + .catch(done) + + aerospike.connect(config).then(client => { + return client.put(key, { i: 123 }) + .then(() => client.close()) + }) + }) + + withNamingSchema( + () => aerospike.connect(config).then(client => { + return client.put(key, { i: 123 }) + .then(() => client.close()) + }), + { + v0: { + opName: 'aerospike.command', + serviceName: 'custom' + }, + v1: { + opName: 'aerospike.command', + serviceName: 'custom' + } + } + ) + }) + }) + }) +}) diff --git a/packages/datadog-plugin-aerospike/test/naming.js b/packages/datadog-plugin-aerospike/test/naming.js new file mode 100644 index 00000000000..75c360d9999 --- /dev/null +++ b/packages/datadog-plugin-aerospike/test/naming.js @@ -0,0 +1,19 @@ +const { resolveNaming } = require('../../dd-trace/test/plugins/helpers') + +const rawExpectedSchema = { + command: { + v0: { + opName: 'aerospike.command', + serviceName: 'test-aerospike' + }, + v1: { + opName: 'aerospike.command', + serviceName: 'test' + } + } +} + +module.exports = { + rawExpectedSchema: rawExpectedSchema, + expectedSchema: resolveNaming(rawExpectedSchema) +} diff --git a/packages/dd-trace/src/plugins/index.js b/packages/dd-trace/src/plugins/index.js index ad92d0ffc43..d2a22cd8b15 100644 --- a/packages/dd-trace/src/plugins/index.js +++ b/packages/dd-trace/src/plugins/index.js @@ -17,6 +17,7 @@ module.exports = { get '@opensearch-project/opensearch' () { return require('../../../datadog-plugin-opensearch/src') }, get '@redis/client' () { return require('../../../datadog-plugin-redis/src') }, get '@smithy/smithy-client' () { return require('../../../datadog-plugin-aws-sdk/src') }, + get 'aerospike' () { return require('../../../datadog-plugin-aerospike/src') }, get 'amqp10' () { return require('../../../datadog-plugin-amqp10/src') }, get 'amqplib' () { return require('../../../datadog-plugin-amqplib/src') }, get 'aws-sdk' () { return require('../../../datadog-plugin-aws-sdk/src') }, diff --git a/packages/dd-trace/src/service-naming/schemas/v0/storage.js b/packages/dd-trace/src/service-naming/schemas/v0/storage.js index e652d71e7fa..2eecfb95e4d 100644 --- a/packages/dd-trace/src/service-naming/schemas/v0/storage.js +++ b/packages/dd-trace/src/service-naming/schemas/v0/storage.js @@ -37,6 +37,11 @@ const redisConfig = { const storage = { client: { + aerospike: { + opName: () => 'aerospike.command', + serviceName: ({ tracerService, pluginConfig }) => + pluginConfig.service || `${tracerService}-aerospike` + }, 'cassandra-driver': { opName: () => 'cassandra.query', serviceName: ({ tracerService, pluginConfig, system }) => diff --git a/packages/dd-trace/src/service-naming/schemas/v1/storage.js 
b/packages/dd-trace/src/service-naming/schemas/v1/storage.js index 0eb83aff976..3b1de3c63a0 100644 --- a/packages/dd-trace/src/service-naming/schemas/v1/storage.js +++ b/packages/dd-trace/src/service-naming/schemas/v1/storage.js @@ -22,6 +22,10 @@ function withFunction ({ tracerService, pluginConfig, params }) { const storage = { client: { + aerospike: { + opName: () => 'aerospike.command', + serviceName: configWithFallback + }, 'cassandra-driver': { opName: () => 'cassandra.query', serviceName: configWithFallback diff --git a/packages/dd-trace/test/setup/mocha.js b/packages/dd-trace/test/setup/mocha.js index 840684761a5..7d75cd41170 100644 --- a/packages/dd-trace/test/setup/mocha.js +++ b/packages/dd-trace/test/setup/mocha.js @@ -192,7 +192,9 @@ function withVersions (plugin, modules, range, cb) { instrumentations .filter(instrumentation => instrumentation.name === moduleName) .forEach(instrumentation => { - instrumentation.versions + const versions = process.env.PACKAGE_VERSION_RANGE ? [process.env.PACKAGE_VERSION_RANGE] + : instrumentation.versions + versions .filter(version => !process.env.RANGE || semver.subset(version, process.env.RANGE)) .forEach(version => { const min = semver.coerce(version).version diff --git a/scripts/install_plugin_modules.js b/scripts/install_plugin_modules.js index 5e912cfb419..b76365bda67 100644 --- a/scripts/install_plugin_modules.js +++ b/scripts/install_plugin_modules.js @@ -80,7 +80,9 @@ async function assertVersions () { } async function assertInstrumentation (instrumentation, external) { - const versions = [].concat(instrumentation.versions || []) + const versions = process.env.PACKAGE_VERSION_RANGE ? [process.env.PACKAGE_VERSION_RANGE] + : [].concat(instrumentation.versions || []) + for (const version of versions) { if (version) { await assertModules(instrumentation.name, semver.coerce(version).version, external) @@ -130,8 +132,14 @@ async function assertPackage (name, version, dependency, external) { } if (!external) { - pkg.workspaces = { - nohoist: ['**/**'] + if (name === 'aerospike') { + pkg.installConfig = { + 'hoistingLimits': 'workspaces' + } + } else { + pkg.workspaces = { + nohoist: ['**/**'] + } } } fs.writeFileSync(filename(name, version, 'package.json'), JSON.stringify(pkg, null, 2) + '\n') From bc9a5da768cb2328a7b131a8820935fb08849a2f Mon Sep 17 00:00:00 2001 From: Ayan Khan Date: Wed, 29 Nov 2023 16:42:48 -0500 Subject: [PATCH 088/147] Instrument Aerospike V3 (#3830) * Instrument Aerospike v3.16.2 - v3.16.7 --- .github/workflows/plugins.yml | 60 +++++++++++++++++++ .../datadog-instrumentations/src/aerospike.js | 2 +- .../test/index.spec.js | 41 ++++++++----- 3 files changed, 87 insertions(+), 16 deletions(-) diff --git a/.github/workflows/plugins.yml b/.github/workflows/plugins.yml index 593882fab65..2e2aa3b5764 100644 --- a/.github/workflows/plugins.yml +++ b/.github/workflows/plugins.yml @@ -20,6 +20,66 @@ env: jobs: + aerospike-3: + runs-on: ubuntu-latest + container: + image: ubuntu:18.04 + services: + aerospike: + image: aerospike:ce-6.4.0.3 + ports: + - 3000:3000 + testagent: + image: ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:latest + env: + LOG_LEVEL: DEBUG + TRACE_LANGUAGE: javascript + DISABLED_CHECKS: trace_content_length + PORT: 9126 + ports: + - 9126:9126 + env: + PLUGINS: aerospike + SERVICES: aerospike + PACKAGE_VERSION_RANGE: '3.16.2 - 3.16.7' + DD_TEST_AGENT_URL: http://testagent:9126 + AEROSPIKE_HOST_ADDRESS: aerospike + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-node@v3 + with: + 
node-version: '14' + - id: pkg + run: | + content=`cat ./package.json | tr '\n' ' '` + echo "::set-output name=json::$content" + - id: extract + run: | + version="${{fromJson(steps.pkg.outputs.json).version}}" + majorVersion=$(echo "$version" | cut -d '.' -f 1) + echo "Major Version: $majorVersion" + echo "MAJOR_VERSION=$majorVersion" >> $GITHUB_ENV + - name: Check package version + if: env.MAJOR_VERSION == '3' + run: | + echo "Package version is 3. Proceeding with the next steps." + - name: Install dependencies + if: env.MAJOR_VERSION == '3' + run: | + apt-get update && \ + apt-get install -y \ + python3 python3-pip \ + wget \ + g++ libssl1.0.0 libssl-dev zlib1g-dev && \ + npm install -g yarn + - if: env.MAJOR_VERSION == '3' + run: yarn install --ignore-engines + - if: env.MAJOR_VERSION == '3' + uses: ./.github/actions/node/14 + - if: env.MAJOR_VERSION == '3' + run: yarn test:plugins:ci + - if: env.MAJOR_VERSION == '3' + uses: codecov/codecov-action@v2 aerospike: runs-on: ubuntu-latest services: diff --git a/packages/datadog-instrumentations/src/aerospike.js b/packages/datadog-instrumentations/src/aerospike.js index 2d20bdcafa0..dade4f81895 100644 --- a/packages/datadog-instrumentations/src/aerospike.js +++ b/packages/datadog-instrumentations/src/aerospike.js @@ -40,7 +40,7 @@ function wrapProcess (process) { addHook({ name: 'aerospike', file: 'lib/commands/command.js', - versions: ['4', '5'] + versions: ['^3.16.2', '4', '5'] }, commandFactory => { return shimmer.wrap(commandFactory, wrapCreateCommand(commandFactory)) diff --git a/packages/datadog-plugin-aerospike/test/index.spec.js b/packages/datadog-plugin-aerospike/test/index.spec.js index c1c47dffe83..11202ef9cd4 100644 --- a/packages/datadog-plugin-aerospike/test/index.spec.js +++ b/packages/datadog-plugin-aerospike/test/index.spec.js @@ -29,8 +29,9 @@ describe('Plugin', () => { userKey = 'key' config = { - hosts: '127.0.0.1:3000', - port: '3000' + hosts: [ + { addr: process.env.AEROSPIKE_HOST_ADDRESS ? 
process.env.AEROSPIKE_HOST_ADDRESS : '127.0.0.1', port: 3000 } + ] } key = new aerospike.Key(ns, set, userKey) keyString = `${ns}:${set}:${userKey}` @@ -45,13 +46,17 @@ describe('Plugin', () => { return agent.load('aerospike') }) + after(() => { + aerospike.releaseEventLoop() + }) + describe('client', () => { withPeerService( () => tracer, 'aerospike', () => aerospike.connect(config).then(client => { return client.put(key, { i: 123 }) - .then(() => client.close()) + .then(() => client.close(false)) }), 'test', 'aerospike.namespace' @@ -76,7 +81,9 @@ describe('Plugin', () => { aerospike.connect(config).then(client => { return client.put(key, { i: 123 }) - .then(() => client.close()) + .then(() => { + client.close(false) + }) }) }) @@ -94,7 +101,7 @@ describe('Plugin', () => { .then(done) .catch(done) - aerospike.connect(config).then(client => { client.close() }) + aerospike.connect(config).then(client => { client.close(false) }) }) it('should instrument get', done => { @@ -117,7 +124,7 @@ describe('Plugin', () => { aerospike.connect(config).then(client => { return client.get(key) - .then(() => client.close()) + .then(() => client.close(false)) }) }) @@ -148,7 +155,7 @@ describe('Plugin', () => { ] return client.operate(key, ops) }) - .then(() => client.close()) + .then(() => client.close(false)) }) }) @@ -180,7 +187,7 @@ describe('Plugin', () => { datatype: aerospike.indexDataType.STRING } return client.createIndex(index) - .then(() => client.close()) + .then(() => client.close(false)) }) }) @@ -190,7 +197,7 @@ describe('Plugin', () => { // this test works on node 14, so it is not a problem with the test but most likely a problem with the package // version and aerospike server version mismatch which is really hard to pin down, since aerospike doesn't // provide info on package version's compatibility with each server version - if (!(NODE_MAJOR === 16 && semver.intersects(version, '^4'))) { + if (!(NODE_MAJOR === 16 && semver.intersects(version, '^4')) && !semver.intersects(version, '^3')) { it('should instrument query', done => { agent .use(traces => { @@ -224,7 +231,7 @@ describe('Plugin', () => { query.select('id', 'tags') query.where(aerospike.filter.contains('tags', 'green', aerospike.indexType.LIST)) const stream = query.foreach(queryPolicy) - stream.on('end', () => { client.close() }) + stream.on('end', () => { client.close(false) }) }) }) }) @@ -236,7 +243,7 @@ describe('Plugin', () => { tracer.scope().activate(obj, () => { client.put(key, { i: 123 }, () => { expect(tracer.scope().active()).to.equal(obj) - client.close() + client.close(false) done() }) }) @@ -267,7 +274,7 @@ describe('Plugin', () => { return client.operate(key, ops) }) - .then(() => client.close()) + .then(() => client.close(false)) }) .catch(err => { error = err @@ -276,7 +283,7 @@ describe('Plugin', () => { withNamingSchema( () => aerospike.connect(config).then(client => { return client.put(key, { i: 123 }) - .then(() => client.close()) + .then(() => client.close(false)) }), rawExpectedSchema.command ) @@ -288,6 +295,10 @@ describe('Plugin', () => { return agent.load('aerospike', { service: 'custom' }) }) + after(() => { + aerospike.releaseEventLoop() + }) + it('should be configured with the correct values', done => { agent .use(traces => { @@ -299,14 +310,14 @@ describe('Plugin', () => { aerospike.connect(config).then(client => { return client.put(key, { i: 123 }) - .then(() => client.close()) + .then(() => client.close(false)) }) }) withNamingSchema( () => aerospike.connect(config).then(client => { return 
client.put(key, { i: 123 }) - .then(() => client.close()) + .then(() => client.close(false)) }), { v0: { From 5a6903e54f7ab04ac021f7b1739b27ca05d3491c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Fri, 1 Dec 2023 10:22:16 +0100 Subject: [PATCH 089/147] [ci-visibility] New ITR settings logic (#3790) --- .../exporters/ci-visibility-exporter.js | 18 +++- .../get-itr-configuration.js | 9 +- packages/dd-trace/src/plugins/ci_plugin.js | 3 +- .../exporters/agentless/exporter.spec.js | 5 ++ .../exporters/ci-visibility-exporter.spec.js | 87 ++++++++++++++++++- 5 files changed, 116 insertions(+), 6 deletions(-) diff --git a/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js b/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js index 43f707c2fe7..446479e1af7 100644 --- a/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js +++ b/packages/dd-trace/src/ci-visibility/exporters/ci-visibility-exporter.js @@ -143,7 +143,23 @@ class CiVisibilityExporter extends AgentInfoExporter { * where the tests run in a subprocess, because `getItrConfiguration` is called only once. */ this._itrConfig = itrConfig - callback(err, itrConfig) + + if (err) { + callback(err, {}) + } else if (itrConfig?.requireGit) { + // If the backend requires git, we'll wait for the upload to finish and request settings again + this._gitUploadPromise.then(gitUploadError => { + if (gitUploadError) { + return callback(gitUploadError, {}) + } + getItrConfigurationRequest(configuration, (err, finalItrConfig) => { + this._itrConfig = finalItrConfig + callback(err, finalItrConfig) + }) + }) + } else { + callback(null, itrConfig) + } }) }) } diff --git a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js index 60abda9294e..d3bfc86dc36 100644 --- a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js +++ b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js @@ -15,6 +15,7 @@ function getItrConfiguration ({ runtimeName, runtimeVersion, branch, + testLevel = 'suite', custom }, done) { const options = { @@ -23,7 +24,8 @@ function getItrConfiguration ({ headers: { 'Content-Type': 'application/json' }, - url + url, + timeout: 20000 } if (isEvpProxy) { @@ -42,7 +44,7 @@ function getItrConfiguration ({ id: id().toString(10), type: 'ci_app_test_service_libraries_settings', attributes: { - test_level: 'suite', + test_level: testLevel, configurations: { 'os.platform': osPlatform, 'os.version': osVersion, @@ -73,6 +75,7 @@ function getItrConfiguration ({ let isCodeCoverageEnabled = attributes.code_coverage let isSuitesSkippingEnabled = attributes.tests_skipping + const { require_git: requireGit } = attributes log.debug(() => `Remote settings: ${JSON.stringify({ isCodeCoverageEnabled, isSuitesSkippingEnabled })}`) @@ -85,7 +88,7 @@ function getItrConfiguration ({ log.debug(() => 'Dangerously set test skipping to true') } - done(null, { isCodeCoverageEnabled, isSuitesSkippingEnabled }) + done(null, { isCodeCoverageEnabled, isSuitesSkippingEnabled, requireGit }) } catch (err) { done(err) } diff --git a/packages/dd-trace/src/plugins/ci_plugin.js b/packages/dd-trace/src/plugins/ci_plugin.js index 89ffffda933..0112c4cb4fa 100644 --- a/packages/dd-trace/src/plugins/ci_plugin.js +++ b/packages/dd-trace/src/plugins/ci_plugin.js @@ -124,7 +124,8 @@ module.exports = class CiPlugin 
extends Plugin { osArchitecture, runtimeName, runtimeVersion, - branch + branch, + testLevel: 'suite' } } diff --git a/packages/dd-trace/test/ci-visibility/exporters/agentless/exporter.spec.js b/packages/dd-trace/test/ci-visibility/exporters/agentless/exporter.spec.js index 6577bce9993..9c12087dbe0 100644 --- a/packages/dd-trace/test/ci-visibility/exporters/agentless/exporter.spec.js +++ b/packages/dd-trace/test/ci-visibility/exporters/agentless/exporter.spec.js @@ -41,6 +41,7 @@ describe('CI Visibility Agentless Exporter', () => { .reply(200, JSON.stringify({ data: { attributes: { + require_git: false, code_coverage: true, tests_skipping: true } @@ -66,6 +67,7 @@ describe('CI Visibility Agentless Exporter', () => { .reply(200, JSON.stringify({ data: { attributes: { + require_git: false, code_coverage: true, tests_skipping: true } @@ -96,6 +98,7 @@ describe('CI Visibility Agentless Exporter', () => { .reply(200, JSON.stringify({ data: { attributes: { + require_git: false, code_coverage: true, tests_skipping: true } @@ -118,6 +121,7 @@ describe('CI Visibility Agentless Exporter', () => { .reply(200, JSON.stringify({ data: { attributes: { + require_git: false, code_coverage: true, tests_skipping: true } @@ -141,6 +145,7 @@ describe('CI Visibility Agentless Exporter', () => { .reply(200, JSON.stringify({ data: { attributes: { + require_git: false, code_coverage: true, tests_skipping: true } diff --git a/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js b/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js index 718e592b97c..22939b89e13 100644 --- a/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js +++ b/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js @@ -119,6 +119,7 @@ describe('CI Visibility Exporter', () => { .reply(200, JSON.stringify({ data: { attributes: { + require_git: false, code_coverage: true, tests_skipping: true } @@ -133,7 +134,7 @@ describe('CI Visibility Exporter', () => { } }) - ciVisibilityExporter.getItrConfiguration({}, (err, itrConfig) => { + ciVisibilityExporter.getItrConfiguration({}, () => { expect(scope.isDone()).to.be.true expect(customConfig).to.eql({ 'my_custom_config': 'my_custom_config_value' @@ -148,6 +149,7 @@ describe('CI Visibility Exporter', () => { .reply(200, JSON.stringify({ data: { attributes: { + require_git: false, code_coverage: true, tests_skipping: true } @@ -158,6 +160,7 @@ describe('CI Visibility Exporter', () => { ciVisibilityExporter.getItrConfiguration({}, (err, itrConfig) => { expect(itrConfig).to.eql({ + requireGit: false, isCodeCoverageEnabled: true, isSuitesSkippingEnabled: true }) @@ -173,6 +176,7 @@ describe('CI Visibility Exporter', () => { .reply(200, JSON.stringify({ data: { attributes: { + require_git: false, code_coverage: true, tests_skipping: true } @@ -188,6 +192,87 @@ describe('CI Visibility Exporter', () => { }) ciVisibilityExporter._resolveCanUseCiVisProtocol(true) }) + it('will retry ITR configuration request if require_git is true', (done) => { + const TIME_TO_UPLOAD_GIT = 50 + let hasUploadedGit = false + const scope = nock(`http://localhost:${port}`) + .post('/api/v2/libraries/tests/services/setting') + .reply(200, JSON.stringify({ + data: { + attributes: { + require_git: true, + code_coverage: true, + tests_skipping: true + } + } + })) + .post('/api/v2/libraries/tests/services/setting') + .reply(200, JSON.stringify({ + data: { + attributes: { + require_git: false, + code_coverage: true, + tests_skipping: true + } 
+ } + })) + + const ciVisibilityExporter = new CiVisibilityExporter({ + port, isIntelligentTestRunnerEnabled: true + }) + ciVisibilityExporter._resolveCanUseCiVisProtocol(true) + expect(ciVisibilityExporter.shouldRequestItrConfiguration()).to.be.true + ciVisibilityExporter.getItrConfiguration({}, (err, itrConfig) => { + expect(scope.isDone()).to.be.true + expect(err).to.be.null + // the second request returns require_git: false + expect(itrConfig.requireGit).to.be.false + expect(hasUploadedGit).to.be.true + done() + }) + // Git upload finishes after a bit + setTimeout(() => { + ciVisibilityExporter._resolveGit() + hasUploadedGit = true + }, TIME_TO_UPLOAD_GIT) + }) + it('will retry ITR configuration request immediately if git upload is already finished', (done) => { + const scope = nock(`http://localhost:${port}`) + .post('/api/v2/libraries/tests/services/setting') + .reply(200, JSON.stringify({ + data: { + attributes: { + require_git: true, + code_coverage: true, + tests_skipping: true + } + } + })) + .post('/api/v2/libraries/tests/services/setting') + .reply(200, JSON.stringify({ + data: { + attributes: { + require_git: false, + code_coverage: true, + tests_skipping: true + } + } + })) + + const ciVisibilityExporter = new CiVisibilityExporter({ + port, isIntelligentTestRunnerEnabled: true + }) + ciVisibilityExporter._resolveCanUseCiVisProtocol(true) + expect(ciVisibilityExporter.shouldRequestItrConfiguration()).to.be.true + ciVisibilityExporter.getItrConfiguration({}, (err, itrConfig) => { + expect(scope.isDone()).to.be.true + expect(err).to.be.null + // the second request returns require_git: false + expect(itrConfig.requireGit).to.be.false + done() + }) + ciVisibilityExporter._resolveGit() + }) }) }) From d34244fa303a1f2d2781eaf038046dafe8b4ff0c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Fri, 1 Dec 2023 11:29:22 +0100 Subject: [PATCH 090/147] =?UTF-8?q?[ci-visibility]=C2=A0Do=20not=20report?= =?UTF-8?q?=20total=20code=20coverage=20unless=20user=20has=20explicitly?= =?UTF-8?q?=20added=20code=20coverage=20(#3828)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- integration-tests/ci-visibility-intake.js | 3 +- integration-tests/ci-visibility.spec.js | 52 +++++++++++++++++++ integration-tests/ci-visibility/run-jest.js | 2 +- integration-tests/ci-visibility/run-jest.mjs | 2 +- integration-tests/cucumber/cucumber.spec.js | 5 ++ packages/datadog-instrumentations/src/jest.js | 16 ++++-- .../get-itr-configuration.js | 19 ++++--- packages/dd-trace/src/plugins/util/test.js | 5 +- .../exporters/ci-visibility-exporter.spec.js | 4 ++ 9 files changed, 90 insertions(+), 18 deletions(-) diff --git a/integration-tests/ci-visibility-intake.js b/integration-tests/ci-visibility-intake.js index c12ffc274f0..2efbba2de03 100644 --- a/integration-tests/ci-visibility-intake.js +++ b/integration-tests/ci-visibility-intake.js @@ -10,7 +10,8 @@ const { FakeAgent } = require('./helpers') const DEFAULT_SETTINGS = { code_coverage: true, - tests_skipping: true + tests_skipping: true, + itr_enabled: true } const DEFAULT_SUITES_TO_SKIP = [] diff --git a/integration-tests/ci-visibility.spec.js b/integration-tests/ci-visibility.spec.js index 538713ff842..b9cf69c4c41 100644 --- a/integration-tests/ci-visibility.spec.js +++ b/integration-tests/ci-visibility.spec.js @@ -446,6 +446,51 @@ testFrameworks.forEach(({ }).catch(done) }) }) + it('does not report total code coverage % if user has not configured coverage manually', (done) 
=> { + receiver.setSettings({ + itr_enabled: true, + code_coverage: true, + tests_skipping: false + }) + + receiver.assertPayloadReceived(({ payload }) => { + const testSession = payload.events.find(event => event.type === 'test_session_end').content + assert.notProperty(testSession.metrics, TEST_CODE_COVERAGE_LINES_PCT) + }, ({ url }) => url === '/api/v2/citestcycle').then(() => done()).catch(done) + + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + DISABLE_CODE_COVERAGE: '1' + }, + stdio: 'inherit' + } + ) + }) + it('reports total code coverage % even when ITR is disabled', (done) => { + receiver.setSettings({ + itr_enabled: false, + code_coverage: false, + tests_skipping: false + }) + + receiver.assertPayloadReceived(({ payload }) => { + const testSession = payload.events.find(event => event.type === 'test_session_end').content + assert.exists(testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT]) + }, ({ url }) => url === '/api/v2/citestcycle').then(() => done()).catch(done) + + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: getCiVisAgentlessConfig(receiver.port), + stdio: 'inherit' + } + ) + }) } it('can run tests and report spans', (done) => { @@ -703,6 +748,7 @@ testFrameworks.forEach(({ }) it('does not report code coverage if disabled by the API', (done) => { receiver.setSettings({ + itr_enabled: false, code_coverage: false, tests_skipping: false }) @@ -720,6 +766,7 @@ testFrameworks.forEach(({ assert.propertyVal(testSession.meta, TEST_ITR_TESTS_SKIPPED, 'false') assert.propertyVal(testSession.meta, TEST_CODE_COVERAGE_ENABLED, 'false') assert.propertyVal(testSession.meta, TEST_ITR_SKIPPING_ENABLED, 'false') + assert.exists(testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT]) const testModule = payload.events.find(event => event.type === 'test_module_end').content assert.propertyVal(testModule.meta, TEST_ITR_TESTS_SKIPPED, 'false') assert.propertyVal(testModule.meta, TEST_CODE_COVERAGE_ENABLED, 'false') @@ -879,6 +926,7 @@ testFrameworks.forEach(({ }) it('does not skip tests if test skipping is disabled by the API', (done) => { receiver.setSettings({ + itr_enabled: true, code_coverage: true, tests_skipping: false }) @@ -1268,6 +1316,7 @@ testFrameworks.forEach(({ }) it('does not report code coverage if disabled by the API', (done) => { receiver.setSettings({ + itr_enabled: false, code_coverage: false, tests_skipping: false }) @@ -1282,6 +1331,8 @@ testFrameworks.forEach(({ assert.propertyVal(headers, 'x-datadog-evp-subdomain', 'citestcycle-intake') const eventTypes = payload.events.map(event => event.type) assert.includeMembers(eventTypes, ['test', 'test_session_end', 'test_module_end', 'test_suite_end']) + const testSession = payload.events.find(event => event.type === 'test_session_end').content + assert.exists(testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT]) }, ({ url }) => url === '/evp_proxy/v2/api/v2/citestcycle').then(() => done()).catch(done) childProcess = exec( @@ -1479,6 +1530,7 @@ testFrameworks.forEach(({ }, ({ url }) => url === '/evp_proxy/v2/api/v2/citestcycle').then(() => done()).catch(done) receiver.setSettings({ + itr_enabled: true, code_coverage: true, tests_skipping: false }) diff --git a/integration-tests/ci-visibility/run-jest.js b/integration-tests/ci-visibility/run-jest.js index 822c132f1bc..50f569aa902 100644 --- a/integration-tests/ci-visibility/run-jest.js +++ b/integration-tests/ci-visibility/run-jest.js @@ -5,7 +5,7 @@ const options = { testPathIgnorePatterns: 
['/node_modules/'], cache: false, testRegex: process.env.TESTS_TO_RUN ? new RegExp(process.env.TESTS_TO_RUN) : /test\/ci-visibility-test/, - coverage: true, + coverage: !process.env.DISABLE_CODE_COVERAGE, runInBand: true, shard: process.env.TEST_SHARD || undefined } diff --git a/integration-tests/ci-visibility/run-jest.mjs b/integration-tests/ci-visibility/run-jest.mjs index 3bd90cb91ca..a35ddda382c 100644 --- a/integration-tests/ci-visibility/run-jest.mjs +++ b/integration-tests/ci-visibility/run-jest.mjs @@ -8,7 +8,7 @@ const options = { testPathIgnorePatterns: ['/node_modules/'], cache: false, testRegex: process.env.TESTS_TO_RUN ? new RegExp(process.env.TESTS_TO_RUN) : /test\/ci-visibility-test/, - coverage: true, + coverage: !process.env.DISABLE_CODE_COVERAGE, runInBand: true, shard: process.env.TEST_SHARD || undefined } diff --git a/integration-tests/cucumber/cucumber.spec.js b/integration-tests/cucumber/cucumber.spec.js index 8be97a841fd..dfdbf13ebb6 100644 --- a/integration-tests/cucumber/cucumber.spec.js +++ b/integration-tests/cucumber/cucumber.spec.js @@ -366,6 +366,7 @@ versions.forEach(version => { }) it('does not report code coverage if disabled by the API', (done) => { receiver.setSettings({ + itr_enabled: false, code_coverage: false, tests_skipping: false }) @@ -382,6 +383,7 @@ versions.forEach(version => { assert.propertyVal(testSession.meta, TEST_ITR_TESTS_SKIPPED, 'false') assert.propertyVal(testSession.meta, TEST_CODE_COVERAGE_ENABLED, 'false') assert.propertyVal(testSession.meta, TEST_ITR_SKIPPING_ENABLED, 'false') + assert.exists(testSession.metrics[TEST_CODE_COVERAGE_LINES_PCT]) const testModule = payload.events.find(event => event.type === 'test_module_end').content assert.propertyVal(testModule.meta, TEST_ITR_TESTS_SKIPPED, 'false') assert.propertyVal(testModule.meta, TEST_CODE_COVERAGE_ENABLED, 'false') @@ -514,6 +516,7 @@ versions.forEach(version => { }) it('does not skip tests if test skipping is disabled by the API', (done) => { receiver.setSettings({ + itr_enabled: true, code_coverage: true, tests_skipping: false }) @@ -551,6 +554,7 @@ versions.forEach(version => { }) it('does not skip suites if suite is marked as unskippable', (done) => { receiver.setSettings({ + itr_enabled: true, code_coverage: true, tests_skipping: true }) @@ -618,6 +622,7 @@ versions.forEach(version => { }) it('only sets forced to run if suite was going to be skipped by ITR', (done) => { receiver.setSettings({ + itr_enabled: true, code_coverage: true, tests_skipping: true }) diff --git a/packages/datadog-instrumentations/src/jest.js b/packages/datadog-instrumentations/src/jest.js index 233bad2bfc3..f62f0c9fac9 100644 --- a/packages/datadog-instrumentations/src/jest.js +++ b/packages/datadog-instrumentations/src/jest.js @@ -44,6 +44,7 @@ const itrSkippedSuitesCh = channel('ci:jest:itr:skipped-suites') let skippableSuites = [] let isCodeCoverageEnabled = false let isSuitesSkippingEnabled = false +let isUserCodeCoverageEnabled = false let isSuitesSkipped = false let numSkippedSuites = 0 let hasUnskippableSuites = false @@ -289,11 +290,14 @@ function cliWrapper (cli, jestVersion) { } = result let testCodeCoverageLinesTotal - try { - const { pct, total } = coverageMap.getCoverageSummary().lines - testCodeCoverageLinesTotal = total !== 0 ? pct : 0 - } catch (e) { - // ignore errors + + if (isUserCodeCoverageEnabled) { + try { + const { pct, total } = coverageMap.getCoverageSummary().lines + testCodeCoverageLinesTotal = total !== 0 ? 
pct : 0 + } catch (e) { + // ignore errors + } } let status, error @@ -436,6 +440,8 @@ function configureTestEnvironment (readConfigsResult) { config.testEnvironmentOptions._ddTestCodeCoverageEnabled = isCodeCoverageEnabled }) + isUserCodeCoverageEnabled = !!readConfigsResult.globalConfig.collectCoverage + if (isCodeCoverageEnabled) { const globalConfig = { ...readConfigsResult.globalConfig, diff --git a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js index d3bfc86dc36..2aee819004d 100644 --- a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js +++ b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js @@ -69,26 +69,29 @@ function getItrConfiguration ({ try { const { data: { - attributes + attributes: { + code_coverage: isCodeCoverageEnabled, + tests_skipping: isSuitesSkippingEnabled, + itr_enabled: isItrEnabled, + require_git: requireGit + } } } = JSON.parse(res) - let isCodeCoverageEnabled = attributes.code_coverage - let isSuitesSkippingEnabled = attributes.tests_skipping - const { require_git: requireGit } = attributes + const settings = { isCodeCoverageEnabled, isSuitesSkippingEnabled, isItrEnabled, requireGit } - log.debug(() => `Remote settings: ${JSON.stringify({ isCodeCoverageEnabled, isSuitesSkippingEnabled })}`) + log.debug(() => `Remote settings: ${JSON.stringify(settings)}`) if (process.env.DD_CIVISIBILITY_DANGEROUSLY_FORCE_COVERAGE) { - isCodeCoverageEnabled = true + settings.isCodeCoverageEnabled = true log.debug(() => 'Dangerously set code coverage to true') } if (process.env.DD_CIVISIBILITY_DANGEROUSLY_FORCE_TEST_SKIPPING) { - isSuitesSkippingEnabled = true + settings.isSuitesSkippingEnabled = true log.debug(() => 'Dangerously set test skipping to true') } - done(null, { isCodeCoverageEnabled, isSuitesSkippingEnabled, requireGit }) + done(null, settings) } catch (err) { done(err) } diff --git a/packages/dd-trace/src/plugins/util/test.js b/packages/dd-trace/src/plugins/util/test.js index d6d0daf93b4..20e35ae416e 100644 --- a/packages/dd-trace/src/plugins/util/test.js +++ b/packages/dd-trace/src/plugins/util/test.js @@ -397,8 +397,9 @@ function addIntelligentTestRunnerSpanTags ( testModuleSpan.setTag(TEST_ITR_FORCED_RUN, 'true') } - // If suites have been skipped we don't want to report the total coverage, as it will be wrong - if (testCodeCoverageLinesTotal !== undefined && !isSuitesSkipped) { + // This will not be reported unless the user has manually added code coverage. + // This is always the case for Mocha and Cucumber, but not for Jest. 
+ if (testCodeCoverageLinesTotal !== undefined) { testSessionSpan.setTag(TEST_CODE_COVERAGE_LINES_PCT, testCodeCoverageLinesTotal) testModuleSpan.setTag(TEST_CODE_COVERAGE_LINES_PCT, testCodeCoverageLinesTotal) } diff --git a/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js b/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js index 22939b89e13..f3d331be567 100644 --- a/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js +++ b/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js @@ -119,6 +119,7 @@ describe('CI Visibility Exporter', () => { .reply(200, JSON.stringify({ data: { attributes: { + itr_enabled: true, require_git: false, code_coverage: true, tests_skipping: true @@ -149,6 +150,7 @@ describe('CI Visibility Exporter', () => { .reply(200, JSON.stringify({ data: { attributes: { + itr_enabled: true, require_git: false, code_coverage: true, tests_skipping: true @@ -162,6 +164,7 @@ describe('CI Visibility Exporter', () => { expect(itrConfig).to.eql({ requireGit: false, isCodeCoverageEnabled: true, + isItrEnabled: true, isSuitesSkippingEnabled: true }) expect(err).not.to.exist @@ -176,6 +179,7 @@ describe('CI Visibility Exporter', () => { .reply(200, JSON.stringify({ data: { attributes: { + itr_enabled: true, require_git: false, code_coverage: true, tests_skipping: true From 0a59f512d8a47390d8a49aeff3f7957608e7cd47 Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Fri, 1 Dec 2023 15:06:37 +0100 Subject: [PATCH 091/147] PROF-8520: Add DNS events to timeline (#3822) --- integration-tests/profiler.spec.js | 95 ++++++- integration-tests/profiler/dnstest.js | 13 + .../src/profiling/profilers/events.js | 237 ++++++++++++------ 3 files changed, 262 insertions(+), 83 deletions(-) create mode 100644 integration-tests/profiler/dnstest.js diff --git a/integration-tests/profiler.spec.js b/integration-tests/profiler.spec.js index cb65228a2e4..f1d4f147bc5 100644 --- a/integration-tests/profiler.spec.js +++ b/integration-tests/profiler.spec.js @@ -50,6 +50,19 @@ function processExitPromise (proc, timeout, expectBadExit = false) { }) } +async function getLatestProfile (cwd, pattern) { + const dirEntries = await fs.readdir(cwd) + // Get the latest file matching the pattern + const pprofEntries = dirEntries.filter(name => pattern.test(name)) + assert.isTrue(pprofEntries.length > 0, `No file matching pattern ${pattern} found in ${cwd}`) + const pprofEntry = pprofEntries + .map(name => ({ name, modified: fsync.statSync(path.join(cwd, name), { bigint: true }).mtimeNs })) + .reduce((a, b) => a.modified > b.modified ? a : b) + .name + const pprofGzipped = await fs.readFile(path.join(cwd, pprofEntry)) + const pprofUnzipped = zlib.gunzipSync(pprofGzipped) + return Profile.decode(pprofUnzipped) +} describe('profiler', () => { let agent let proc @@ -90,17 +103,7 @@ describe('profiler', () => { await processExitPromise(proc, 5000) const procEnd = BigInt(Date.now() * 1000000) - const dirEntries = await fs.readdir(cwd) - // Get the latest wall_*.pprof file - const pprofEntries = dirEntries.filter(name => /^wall_.+\.pprof$/.test(name)) - assert.isTrue(pprofEntries.length > 0, `No wall_*.pprof file found in ${cwd}`) - const pprofEntry = pprofEntries - .map(name => ({ name, modified: fsync.statSync(path.join(cwd, name), { bigint: true }).mtimeNs })) - .reduce((a, b) => a.modified > b.modified ? 
a : b) - .name - const pprofGzipped = await fs.readFile(path.join(cwd, pprofEntry)) - const pprofUnzipped = zlib.gunzipSync(pprofGzipped) - const prof = Profile.decode(pprofUnzipped) + const prof = await getLatestProfile(cwd, /^wall_.+\.pprof$/) // We check the profile for following invariants: // - every sample needs to have an 'end_timestamp_ns' label that has values (nanos since UNIX @@ -174,6 +177,76 @@ describe('profiler', () => { assert.equal(endpoints.size, 3) }) + it('dns timeline events work', async () => { + const procStart = BigInt(Date.now() * 1000000) + const proc = fork(path.join(cwd, 'profiler/dnstest.js'), { + cwd, + env: { + DD_PROFILING_PROFILERS: 'wall', + DD_PROFILING_EXPORTERS: 'file', + DD_PROFILING_ENABLED: 1, + DD_PROFILING_EXPERIMENTAL_TIMELINE_ENABLED: 1 + } + }) + + await processExitPromise(proc, 5000) + const procEnd = BigInt(Date.now() * 1000000) + + const prof = await getLatestProfile(cwd, /^events_.+\.pprof$/) + assert.isAtLeast(prof.sample.length, 5) + + const strings = prof.stringTable + const tsKey = strings.dedup('end_timestamp_ns') + const eventKey = strings.dedup('event') + const hostKey = strings.dedup('host') + const addressKey = strings.dedup('address') + const threadNameKey = strings.dedup('thread name') + const nameKey = strings.dedup('operation name') + const threadNameValue = strings.dedup('Main DNS') + const dnsEventValue = strings.dedup('dns') + const dnsEvents = [] + for (const sample of prof.sample) { + let ts, event, host, address, name, threadName + for (const label of sample.label) { + switch (label.key) { + case tsKey: ts = label.num; break + case nameKey: name = label.str; break + case eventKey: event = label.str; break + case hostKey: host = label.str; break + case addressKey: address = label.str; break + case threadNameKey: threadName = label.str; break + default: assert.fail(`Unexpected label key ${strings.dedup(label.key)}`) + } + } + // Timestamp must be defined and be between process start and end time + assert.isDefined(ts) + assert.isTrue(ts <= procEnd) + assert.isTrue(ts >= procStart) + // Gather only DNS events; ignore sporadic GC events + if (event === dnsEventValue) { + // Thread name must be defined and exactly equal "Main DNS" + assert.equal(threadName, threadNameValue) + assert.isDefined(name) + // Exactly one of these is defined + assert.isTrue(!!address !== !!host) + const ev = { name: strings.strings[name] } + if (address) { + ev.address = strings.strings[address] + } else { + ev.host = strings.strings[host] + } + dnsEvents.push(ev) + } + } + assert.sameDeepMembers(dnsEvents, [ + { name: 'lookup', host: 'example.org' }, + { name: 'lookup', host: 'example.com' }, + { name: 'lookup', host: 'datadoghq.com' }, + { name: 'queryA', host: 'datadoghq.com' }, + { name: 'lookupService', address: '13.224.103.60:80' } + ]) + }) + context('shutdown', () => { beforeEach(async () => { agent = await new FakeAgent().start() diff --git a/integration-tests/profiler/dnstest.js b/integration-tests/profiler/dnstest.js new file mode 100644 index 00000000000..4af0f00750e --- /dev/null +++ b/integration-tests/profiler/dnstest.js @@ -0,0 +1,13 @@ +const dns = require('node:dns') + +require('dd-trace').init().profilerStarted().then(() => { + dns.lookupService('13.224.103.60', 80, () => {}) + dns.lookup('example.org', () => {}) + dns.lookup('example.com', () => {}) + dns.lookup('datadoghq.com', () => {}) + dns.resolve4('datadoghq.com', () => {}) + dns.lookup('dfslkgsjkrtgrdg.com', () => {}) +}) + +// Give the event processor chance to collect 
events +setTimeout(() => {}, 3000) diff --git a/packages/dd-trace/src/profiling/profilers/events.js b/packages/dd-trace/src/profiling/profilers/events.js index 83174c014de..4f51e5efdfb 100644 --- a/packages/dd-trace/src/profiling/profilers/events.js +++ b/packages/dd-trace/src/profiling/profilers/events.js @@ -14,7 +14,137 @@ const MS_TO_NS = 1000000 // perf_hooks events, the emitted pprof file uses the type "timeline". const pprofValueType = 'timeline' const pprofValueUnit = 'nanoseconds' -const threadName = `${threadNamePrefix} GC` + +function labelFromStr (stringTable, key, valStr) { + return new Label({ key, str: stringTable.dedup(valStr) }) +} + +function labelFromStrStr (stringTable, keyStr, valStr) { + return labelFromStr(stringTable, stringTable.dedup(keyStr), valStr) +} + +class GCDecorator { + constructor (stringTable) { + this.stringTable = stringTable + this.reasonLabelKey = stringTable.dedup('gc reason') + this.kindLabels = [] + this.reasonLabels = [] + this.flagObj = {} + + const kindLabelKey = stringTable.dedup('gc type') + + // Create labels for all GC performance flags and kinds of GC + for (const [key, value] of Object.entries(constants)) { + if (key.startsWith('NODE_PERFORMANCE_GC_FLAGS_')) { + this.flagObj[key.substring(26).toLowerCase()] = value + } else if (key.startsWith('NODE_PERFORMANCE_GC_')) { + // It's a constant for a kind of GC + const kind = key.substring(20).toLowerCase() + this.kindLabels[value] = labelFromStr(stringTable, kindLabelKey, kind) + } + } + } + + decorateSample (sampleInput, item) { + const { kind, flags } = node16 ? item.detail : item + sampleInput.label.push(this.kindLabels[kind]) + const reasonLabel = this.getReasonLabel(flags) + if (reasonLabel) { + sampleInput.label.push(reasonLabel) + } + } + + getReasonLabel (flags) { + if (flags === 0) { + return null + } + let reasonLabel = this.reasonLabels[flags] + if (!reasonLabel) { + const reasons = [] + for (const [key, value] of Object.entries(this.flagObj)) { + if (value & flags) { + reasons.push(key) + } + } + const reasonStr = reasons.join(',') + reasonLabel = labelFromStr(this.stringTable, this.reasonLabelKey, reasonStr) + this.reasonLabels[flags] = reasonLabel + } + return reasonLabel + } +} + +class DNSDecorator { + constructor (stringTable) { + this.stringTable = stringTable + this.operationNameLabelKey = stringTable.dedup('operation') + this.hostLabelKey = stringTable.dedup('host') + this.addressLabelKey = stringTable.dedup('address') + this.lanes = [] + } + + decorateSample (sampleInput, item) { + const labels = sampleInput.label + const stringTable = this.stringTable + function addLabel (labelNameKey, labelValue) { + labels.push(labelFromStr(stringTable, labelNameKey, labelValue)) + } + const op = item.name + addLabel(this.operationNameLabelKey, item.name) + const detail = item.detail + switch (op) { + case 'lookup': + addLabel(this.hostLabelKey, detail.hostname) + break + case 'lookupService': + addLabel(this.addressLabelKey, `${detail.host}:${detail.port}`) + break + case 'getHostByAddr': + addLabel(this.addressLabelKey, detail.host) + break + default: + if (op.startsWith('query')) { + addLabel(this.hostLabelKey, detail.host) + } + } + labels.push(this.getLaneLabelFor(item)) + } + + // Maintains "lanes" (or virtual threads) to avoid overlaps in events. The + // decorator starts out with no lanes, and dynamically adds them as needed. + // Every event is put in the first lane where it doesn't overlap with the last + // event in that lane. 
If there's no lane without overlaps, a new lane is + // created. + getLaneLabelFor (item) { + const startTime = item.startTime + const endTime = startTime + item.duration + + // Biases towards populating earlier lanes, but at least it's simple + for (const lane of this.lanes) { + if (lane.endTime <= startTime) { + lane.endTime = endTime + return lane.label + } + } + const label = labelFromStrStr( + this.stringTable, + THREAD_NAME, + `${threadNamePrefix} DNS-${this.lanes.length}` + ) + this.lanes.push({ endTime, label }) + return label + } +} + +// Keys correspond to PerformanceEntry.entryType, values are constructor +// functions for type-specific decorators. +const decoratorTypes = { + gc: GCDecorator +} +// Needs at least node 16 for DNS +if (node16) { + decoratorTypes.dns = DNSDecorator +} /** * This class generates pprof files with timeline events sourced from Node.js @@ -35,8 +165,7 @@ class EventsProfiler { if (!this._observer) { this._observer = new PerformanceObserver(add.bind(this)) } - // Currently only support GC - this._observer.observe({ entryTypes: ['gc'] }) + this._observer.observe({ entryTypes: Object.keys(decoratorTypes) }) } stop () { @@ -52,91 +181,55 @@ class EventsProfiler { } const stringTable = new StringTable() - const timestampLabelKey = stringTable.dedup(END_TIMESTAMP) - const kindLabelKey = stringTable.dedup('gc type') - const reasonLabelKey = stringTable.dedup('gc reason') - const kindLabels = [] - const reasonLabels = [] const locations = [] const functions = [] - const locationsPerKind = [] - const flagObj = {} - - function labelFromStr (key, valStr) { - return new Label({ key, str: stringTable.dedup(valStr) }) - } - - function labelFromStrStr (keyStr, valStr) { - return labelFromStr(stringTable.dedup(keyStr), valStr) - } - - // Create labels for all GC performance flags and kinds of GC - for (const [key, value] of Object.entries(constants)) { - if (key.startsWith('NODE_PERFORMANCE_GC_FLAGS_')) { - flagObj[key.substring(26).toLowerCase()] = value - } else if (key.startsWith('NODE_PERFORMANCE_GC_')) { - // It's a constant for a kind of GC - const kind = key.substring(20).toLowerCase() - kindLabels[value] = labelFromStr(kindLabelKey, kind) - // Construct a single-frame "location" too - const fn = new Function({ id: functions.length + 1, name: stringTable.dedup(`${kind} GC`) }) - functions.push(fn) - const line = new Line({ functionId: fn.id }) - const location = new Location({ id: locations.length + 1, line: [line] }) - locations.push(location) - locationsPerKind[value] = [location.id] - } - } - const gcEventLabel = labelFromStrStr('event', 'gc') - const threadLabel = labelFromStrStr(THREAD_NAME, threadName) + // A synthetic single-frame location to serve as the location for timeline + // samples. We need these as the profiling backend (mimicking official pprof + // tool's behavior) ignores these. 
+ const locationId = (() => { + const fn = new Function({ id: functions.length + 1, name: stringTable.dedup('') }) + functions.push(fn) + const line = new Line({ functionId: fn.id }) + const location = new Location({ id: locations.length + 1, line: [line] }) + locations.push(location) + return [location.id] + })() - function getReasonLabel (flags) { - if (flags === 0) { - return null - } - let reasonLabel = reasonLabels[flags] - if (!reasonLabel) { - const reasons = [] - for (const [key, value] of Object.entries(flagObj)) { - if (value & flags) { - reasons.push(key) - } - } - const reasonStr = reasons.join(',') - reasonLabel = labelFromStr(reasonLabelKey, reasonStr) - reasonLabels[flags] = reasonLabel - } - return reasonLabel + const decorators = {} + for (const [eventType, DecoratorCtor] of Object.entries(decoratorTypes)) { + const decorator = new DecoratorCtor(stringTable) + decorator.eventTypeLabel = labelFromStrStr(stringTable, 'event', eventType) + decorators[eventType] = decorator } + const timestampLabelKey = stringTable.dedup(END_TIMESTAMP) let durationFrom = Number.POSITIVE_INFINITY let durationTo = 0 const dateOffset = BigInt(Math.round(performance.timeOrigin * MS_TO_NS)) const samples = this.entries.map((item) => { + const decorator = decorators[item.entryType] + if (!decorator) { + // Shouldn't happen but it's better to not rely on observer only getting + // requested event types. + return null + } const { startTime, duration } = item - const { kind, flags } = node16 ? item.detail : item const endTime = startTime + duration if (durationFrom > startTime) durationFrom = startTime if (durationTo < endTime) durationTo = endTime - const labels = [ - gcEventLabel, - threadLabel, - new Label({ key: timestampLabelKey, num: dateOffset + BigInt(Math.round(endTime * MS_TO_NS)) }), - kindLabels[kind] - ] - const reasonLabel = getReasonLabel(flags) - if (reasonLabel) { - labels.push(reasonLabel) - } - const sample = new Sample({ + const sampleInput = { value: [Math.round(duration * MS_TO_NS)], - label: labels, - locationId: locationsPerKind[kind] - }) - return sample - }) + locationId, + label: [ + decorator.eventTypeLabel, + new Label({ key: timestampLabelKey, num: dateOffset + BigInt(Math.round(endTime * MS_TO_NS)) }) + ] + } + decorator.decorateSample(sampleInput, item) + return new Sample(sampleInput) + }).filter(v => v) this.entries = [] From 9d2fc7e60967fd1642aced42991935d20d102ffa Mon Sep 17 00:00:00 2001 From: Thomas Hunter II Date: Fri, 1 Dec 2023 12:28:15 -0800 Subject: [PATCH 092/147] configurable tracing header w/ aws signed request (#3836) - adds a new `http` service configuration option `enablePropagationWithAmazonHeaders` - defaults to `false` which is current behavior - when set to `true` it will allow injecting tracing headers for requests signed via AWS IAM headers - allows customer applications which communicate across services with signed requests to be traced - making this a configurable since there may be use-cases which the change could break --- docs/test.ts | 3 ++ index.d.ts | 8 ++++ packages/datadog-plugin-http/src/client.js | 14 +++++- .../datadog-plugin-http/test/client.spec.js | 46 +++++++++++++++++++ .../datadog-plugin-openai/test/index.spec.js | 2 +- 5 files changed, 71 insertions(+), 2 deletions(-) diff --git a/docs/test.ts b/docs/test.ts index dab49394a81..3c9342a0bba 100644 --- a/docs/test.ts +++ b/docs/test.ts @@ -292,6 +292,9 @@ tracer.use('http', { tracer.use('http', { client: httpClientOptions }); +tracer.use('http', { + 
enablePropagationWithAmazonHeaders: true +}); tracer.use('http2'); tracer.use('http2', { server: http2ServerOptions diff --git a/index.d.ts b/index.d.ts index 0a7c859f51d..f84328ea25c 100644 --- a/index.d.ts +++ b/index.d.ts @@ -943,6 +943,14 @@ declare namespace plugins { * @default code => code < 500 */ validateStatus?: (code: number) => boolean; + + /** + * Enable injection of tracing headers into requests signed with AWS IAM headers. + * Disable this if you get AWS signature errors (HTTP 403). + * + * @default false + */ + enablePropagationWithAmazonHeaders?: boolean; } /** @hidden */ diff --git a/packages/datadog-plugin-http/src/client.js b/packages/datadog-plugin-http/src/client.js index 8ea210a0ba9..441290c1199 100644 --- a/packages/datadog-plugin-http/src/client.js +++ b/packages/datadog-plugin-http/src/client.js @@ -58,7 +58,7 @@ class HttpClientPlugin extends ClientPlugin { span._spanContext._trace.record = false } - if (!(hasAmazonSignature(options) || !this.config.propagationFilter(uri))) { + if (this.shouldInjectTraceHeaders(options, uri)) { this.tracer.inject(span, HTTP_HEADERS, options.headers) } @@ -71,6 +71,18 @@ class HttpClientPlugin extends ClientPlugin { return message.currentStore } + shouldInjectTraceHeaders (options, uri) { + if (hasAmazonSignature(options) && !this.config.enablePropagationWithAmazonHeaders) { + return false + } + + if (!this.config.propagationFilter(uri)) { + return false + } + + return true + } + bindAsyncStart ({ parentStore }) { return parentStore } diff --git a/packages/datadog-plugin-http/test/client.spec.js b/packages/datadog-plugin-http/test/client.spec.js index ca2a89cf67d..201a7c33418 100644 --- a/packages/datadog-plugin-http/test/client.spec.js +++ b/packages/datadog-plugin-http/test/client.spec.js @@ -1052,6 +1052,52 @@ describe('Plugin', () => { }) }) + describe('with config enablePropagationWithAmazonHeaders enabled', () => { + let config + + beforeEach(() => { + config = { + enablePropagationWithAmazonHeaders: true + } + + return agent.load('http', config) + .then(() => { + http = require(protocol) + express = require('express') + }) + }) + + it('should inject tracing header into AWS signed request', done => { + const app = express() + + app.get('/', (req, res) => { + try { + expect(req.get('x-datadog-trace-id')).to.be.a('string') + expect(req.get('x-datadog-parent-id')).to.be.a('string') + + res.status(200).send() + + done() + } catch (e) { + done(e) + } + }) + + getPort().then(port => { + appListener = server(app, port, () => { + const req = http.request({ + port, + headers: { + Authorization: 'AWS4-HMAC-SHA256 ...' 
+ } + }) + + req.end() + }) + }) + }) + }) + describe('with validateStatus configuration', () => { let config diff --git a/packages/datadog-plugin-openai/test/index.spec.js b/packages/datadog-plugin-openai/test/index.spec.js index 4da2caaaad7..ccf586645c0 100644 --- a/packages/datadog-plugin-openai/test/index.spec.js +++ b/packages/datadog-plugin-openai/test/index.spec.js @@ -181,7 +181,7 @@ describe('Plugin', () => { 'error:0' ] - expect(metricStub).to.have.been.calledWith('openai.request.duration', 0, 'd', expectedTags) + expect(metricStub).to.have.been.calledWith('openai.request.duration') // timing value not guaranteed expect(metricStub).to.have.been.calledWith('openai.tokens.prompt', 3, 'd', expectedTags) expect(metricStub).to.have.been.calledWith('openai.tokens.completion', 16, 'd', expectedTags) expect(metricStub).to.have.been.calledWith('openai.tokens.total', 19, 'd', expectedTags) From 626afefd00bb26b197a3df2223380ea4e582939c Mon Sep 17 00:00:00 2001 From: Sam Brenner <106700075+sabrenner@users.noreply.github.com> Date: Fri, 1 Dec 2023 15:40:57 -0500 Subject: [PATCH 093/147] [core] Do Not Report HTTP Requests Over 5 Seconds as Errors on Node 20 (#3841) [core] Do Not Report HTTP Requests Over 5 Seconds as Errors on Node 20 --- .../src/http/client.js | 10 ++ packages/datadog-plugin-http/src/client.js | 7 +- .../datadog-plugin-http/test/client.spec.js | 98 +++++++++++++++++++ 3 files changed, 114 insertions(+), 1 deletion(-) diff --git a/packages/datadog-instrumentations/src/http/client.js b/packages/datadog-instrumentations/src/http/client.js index e6a33fd7905..fcf5cc05f0a 100644 --- a/packages/datadog-instrumentations/src/http/client.js +++ b/packages/datadog-instrumentations/src/http/client.js @@ -58,6 +58,7 @@ function patch (http, methodName) { } const options = args.options + const finish = () => { if (!finished) { finished = true @@ -68,9 +69,17 @@ function patch (http, methodName) { try { const req = request.call(this, options, callback) const emit = req.emit + const setTimeout = req.setTimeout ctx.req = req + // tracked to accurately discern custom request socket timeout + let customRequestTimeout = false + req.setTimeout = function () { + customRequestTimeout = true + return setTimeout.apply(this, arguments) + } + req.emit = function (eventName, arg) { switch (eventName) { case 'response': { @@ -88,6 +97,7 @@ function patch (http, methodName) { case 'error': case 'timeout': ctx.error = arg + ctx.customRequestTimeout = customRequestTimeout errorChannel.publish(ctx) case 'abort': // deprecated and replaced by `close` in node 17 case 'close': diff --git a/packages/datadog-plugin-http/src/client.js b/packages/datadog-plugin-http/src/client.js index 441290c1199..42833bb896f 100644 --- a/packages/datadog-plugin-http/src/client.js +++ b/packages/datadog-plugin-http/src/client.js @@ -110,7 +110,7 @@ class HttpClientPlugin extends ClientPlugin { span.finish() } - error ({ span, error }) { + error ({ span, error, args, customRequestTimeout }) { if (!span) return if (error) { span.addTags({ @@ -119,6 +119,11 @@ class HttpClientPlugin extends ClientPlugin { [ERROR_STACK]: error.stack }) } else { + // conditions for no error: + // 1. not using a custom agent instance with custom timeout specified + // 2. 
no invocation of `req.setTimeout` + if (!args.options.agent?.options.timeout && !customRequestTimeout) return + span.setTag('error', 1) } } diff --git a/packages/datadog-plugin-http/test/client.spec.js b/packages/datadog-plugin-http/test/client.spec.js index 201a7c33418..7256950ac83 100644 --- a/packages/datadog-plugin-http/test/client.spec.js +++ b/packages/datadog-plugin-http/test/client.spec.js @@ -12,6 +12,7 @@ const cert = fs.readFileSync(path.join(__dirname, './ssl/test.crt')) const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../../dd-trace/src/constants') const { DD_MAJOR } = require('../../../version') const { rawExpectedSchema } = require('./naming') +const { satisfies } = require('semver') const HTTP_REQUEST_HEADERS = tags.HTTP_REQUEST_HEADERS const HTTP_RESPONSE_HEADERS = tags.HTTP_RESPONSE_HEADERS @@ -804,6 +805,103 @@ describe('Plugin', () => { }) }) + if (satisfies(process.version, '>=20')) { + it('should not record default HTTP agent timeout as error with Node 20', done => { + const app = express() + + app.get('/user', async (req, res) => { + await new Promise(resolve => { + setTimeout(resolve, 6 * 1000) // over 5s default + }) + res.status(200).send() + }) + + getPort().then(port => { + agent + .use(traces => { + expect(traces[0][0]).to.have.property('error', 0) + }) + .then(done) + .catch(done) + + appListener = server(app, port, async () => { + const req = http.request(`${protocol}://localhost:${port}/user`, res => { + res.on('data', () => { }) + }) + + req.on('error', () => {}) + + req.end() + }) + }) + }).timeout(10000) + + it('should record error if custom Agent timeout is used with Node 20', done => { + const app = express() + + app.get('/user', async (req, res) => { + await new Promise(resolve => { + setTimeout(resolve, 6 * 1000) + }) + res.status(200).send() + }) + + getPort().then(port => { + agent + .use(traces => { + expect(traces[0][0]).to.have.property('error', 1) + }) + .then(done) + .catch(done) + + const options = { + agent: new http.Agent({ keepAlive: true, timeout: 5000 }) // custom agent with same default timeout + } + + appListener = server(app, port, async () => { + const req = http.request(`${protocol}://localhost:${port}/user`, options, res => { + res.on('data', () => { }) + }) + + req.on('error', () => {}) + + req.end() + }) + }) + }).timeout(10000) + + it('should record error if req.setTimeout is used with Node 20', done => { + const app = express() + + app.get('/user', async (req, res) => { + await new Promise(resolve => { + setTimeout(resolve, 6 * 1000) + }) + res.status(200).send() + }) + + getPort().then(port => { + agent + .use(traces => { + expect(traces[0][0]).to.have.property('error', 1) + }) + .then(done) + .catch(done) + + appListener = server(app, port, async () => { + const req = http.request(`${protocol}://localhost:${port}/user`, res => { + res.on('data', () => { }) + }) + + req.on('error', () => {}) + req.setTimeout(5000) // match default timeout + + req.end() + }) + }) + }).timeout(10000) + } + it('should only record a request once', done => { // Make sure both plugins are loaded, which could cause double-counting. 
require('http') From 154ca9a6c38bafcc89dffb859e6da4905cf98eb5 Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Tue, 5 Dec 2023 10:05:56 +0100 Subject: [PATCH 094/147] Fix integ tests for DNS (#3844) --- integration-tests/profiler.spec.js | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/integration-tests/profiler.spec.js b/integration-tests/profiler.spec.js index f1d4f147bc5..90482df58a4 100644 --- a/integration-tests/profiler.spec.js +++ b/integration-tests/profiler.spec.js @@ -201,8 +201,7 @@ describe('profiler', () => { const hostKey = strings.dedup('host') const addressKey = strings.dedup('address') const threadNameKey = strings.dedup('thread name') - const nameKey = strings.dedup('operation name') - const threadNameValue = strings.dedup('Main DNS') + const nameKey = strings.dedup('operation') const dnsEventValue = strings.dedup('dns') const dnsEvents = [] for (const sample of prof.sample) { @@ -215,7 +214,7 @@ describe('profiler', () => { case hostKey: host = label.str; break case addressKey: address = label.str; break case threadNameKey: threadName = label.str; break - default: assert.fail(`Unexpected label key ${strings.dedup(label.key)}`) + default: assert.fail(`Unexpected label key ${label.key} ${strings.strings[label.key]}`) } } // Timestamp must be defined and be between process start and end time @@ -225,7 +224,7 @@ describe('profiler', () => { // Gather only DNS events; ignore sporadic GC events if (event === dnsEventValue) { // Thread name must be defined and exactly equal "Main DNS" - assert.equal(threadName, threadNameValue) + assert.isTrue(strings.strings[threadName].startsWith('Main DNS-')) assert.isDefined(name) // Exactly one of these is defined assert.isTrue(!!address !== !!host) From 1aa2899b532140344b0161f13599be4c11b19902 Mon Sep 17 00:00:00 2001 From: Ugaitz Urien Date: Tue, 5 Dec 2023 16:29:52 +0100 Subject: [PATCH 095/147] Detect header injection code vulnerability (#3813) --- .../src/appsec/iast/analyzers/analyzers.js | 1 + .../analyzers/header-injection-analyzer.js | 96 ++ .../vulnerabilities-formatter/constants.js | 7 + .../command-sensitive-analyzer.js | 31 +- .../header-sensitive-analyzer.js | 20 + .../json-sensitive-analyzer.js | 16 +- .../ldap-sensitive-analyzer.js | 43 +- .../sql-sensitive-analyzer.js | 164 ++- .../url-sensitive-analyzer.js | 63 +- .../evidence-redaction/sensitive-handler.js | 25 +- .../src/appsec/iast/vulnerabilities.js | 1 + .../header-injection.express.plugin.spec.js | 228 ++++ .../resources/set-header-function.js | 7 + packages/dd-trace/test/appsec/iast/utils.js | 17 +- .../vulnerability-formatter/index.spec.js | 21 +- .../resources/evidence-redaction-suite.json | 1190 ++++++++++++++--- 16 files changed, 1571 insertions(+), 359 deletions(-) create mode 100644 packages/dd-trace/src/appsec/iast/analyzers/header-injection-analyzer.js create mode 100644 packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/constants.js create mode 100644 packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/header-sensitive-analyzer.js create mode 100644 packages/dd-trace/test/appsec/iast/analyzers/header-injection.express.plugin.spec.js create mode 100644 packages/dd-trace/test/appsec/iast/analyzers/resources/set-header-function.js diff --git a/packages/dd-trace/src/appsec/iast/analyzers/analyzers.js b/packages/dd-trace/src/appsec/iast/analyzers/analyzers.js index 62933baa24d..7152d07458f 100644 --- a/packages/dd-trace/src/appsec/iast/analyzers/analyzers.js +++ 
b/packages/dd-trace/src/appsec/iast/analyzers/analyzers.js @@ -3,6 +3,7 @@ module.exports = { 'COMMAND_INJECTION_ANALYZER': require('./command-injection-analyzer'), 'HARCODED_SECRET_ANALYZER': require('./hardcoded-secret-analyzer'), + 'HEADER_INJECTION_ANALYZER': require('./header-injection-analyzer'), 'HSTS_HEADER_MISSING_ANALYZER': require('./hsts-header-missing-analyzer'), 'INSECURE_COOKIE_ANALYZER': require('./insecure-cookie-analyzer'), 'LDAP_ANALYZER': require('./ldap-injection-analyzer'), diff --git a/packages/dd-trace/src/appsec/iast/analyzers/header-injection-analyzer.js b/packages/dd-trace/src/appsec/iast/analyzers/header-injection-analyzer.js new file mode 100644 index 00000000000..73ac404f5a5 --- /dev/null +++ b/packages/dd-trace/src/appsec/iast/analyzers/header-injection-analyzer.js @@ -0,0 +1,96 @@ +'use strict' + +const InjectionAnalyzer = require('./injection-analyzer') +const { HEADER_INJECTION } = require('../vulnerabilities') +const { getNodeModulesPaths } = require('../path-line') +const { HEADER_NAME_VALUE_SEPARATOR } = require('../vulnerabilities-formatter/constants') +const { getRanges } = require('../taint-tracking/operations') +const { + HTTP_REQUEST_COOKIE_NAME, + HTTP_REQUEST_COOKIE_VALUE, + HTTP_REQUEST_HEADER_VALUE +} = require('../taint-tracking/source-types') + +const EXCLUDED_PATHS = getNodeModulesPaths('express') +const EXCLUDED_HEADER_NAMES = [ + 'location', + 'sec-websocket-location', + 'sec-websocket-accept', + 'upgrade', + 'connection' +] + +class HeaderInjectionAnalyzer extends InjectionAnalyzer { + constructor () { + super(HEADER_INJECTION) + } + + onConfigure () { + this.addSub('datadog:http:server:response:set-header:finish', ({ name, value }) => { + if (Array.isArray(value)) { + for (let i = 0; i < value.length; i++) { + const headerValue = value[i] + + this.analyze({ name, value: headerValue }) + } + } else { + this.analyze({ name, value }) + } + }) + } + + _isVulnerable ({ name, value }, iastContext) { + const lowerCasedHeaderName = name?.trim().toLowerCase() + + if (this.isExcludedHeaderName(lowerCasedHeaderName) || typeof value !== 'string') return + + return super._isVulnerable(value, iastContext) && + !(this.isCookieExclusion(lowerCasedHeaderName, value, iastContext) || + this.isAccessControlAllowOriginExclusion(lowerCasedHeaderName, value, iastContext)) + } + + _getEvidence (headerInfo, iastContext) { + const prefix = headerInfo.name + HEADER_NAME_VALUE_SEPARATOR + const prefixLength = prefix.length + + const evidence = super._getEvidence(headerInfo.value, iastContext) + evidence.value = prefix + evidence.value + evidence.ranges = evidence.ranges.map(range => { + return { + ...range, + start: range.start + prefixLength, + end: range.end + prefixLength + } + }) + + return evidence + } + + isExcludedHeaderName (name) { + return EXCLUDED_HEADER_NAMES.includes(name) + } + + isCookieExclusion (name, value, iastContext) { + if (name === 'set-cookie') { + return getRanges(iastContext, value) + .every(range => range.iinfo.type === HTTP_REQUEST_COOKIE_VALUE || range.iinfo.type === HTTP_REQUEST_COOKIE_NAME) + } + + return false + } + + isAccessControlAllowOriginExclusion (name, value, iastContext) { + if (name === 'access-control-allow-origin') { + return getRanges(iastContext, value) + .every(range => range.iinfo.type === HTTP_REQUEST_HEADER_VALUE) + } + + return false + } + + _getExcludedPaths () { + return EXCLUDED_PATHS + } +} + +module.exports = new HeaderInjectionAnalyzer() diff --git 
a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/constants.js b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/constants.js new file mode 100644 index 00000000000..4183de0fd97 --- /dev/null +++ b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/constants.js @@ -0,0 +1,7 @@ +'use strict' + +const HEADER_NAME_VALUE_SEPARATOR = ': ' + +module.exports = { + HEADER_NAME_VALUE_SEPARATOR +} diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/command-sensitive-analyzer.js b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/command-sensitive-analyzer.js index 39b38a79696..abf341a1a1f 100644 --- a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/command-sensitive-analyzer.js +++ b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/command-sensitive-analyzer.js @@ -3,27 +3,20 @@ const iastLog = require('../../../iast-log') const COMMAND_PATTERN = '^(?:\\s*(?:sudo|doas)\\s+)?\\b\\S+\\b\\s(.*)' +const pattern = new RegExp(COMMAND_PATTERN, 'gmi') -class CommandSensitiveAnalyzer { - constructor () { - this._pattern = new RegExp(COMMAND_PATTERN, 'gmi') - } - - extractSensitiveRanges (evidence) { - try { - this._pattern.lastIndex = 0 +module.exports = function extractSensitiveRanges (evidence) { + try { + pattern.lastIndex = 0 - const regexResult = this._pattern.exec(evidence.value) - if (regexResult && regexResult.length > 1) { - const start = regexResult.index + (regexResult[0].length - regexResult[1].length) - const end = start + regexResult[1].length - return [{ start, end }] - } - } catch (e) { - iastLog.debug(e) + const regexResult = pattern.exec(evidence.value) + if (regexResult && regexResult.length > 1) { + const start = regexResult.index + (regexResult[0].length - regexResult[1].length) + const end = start + regexResult[1].length + return [{ start, end }] } - return [] + } catch (e) { + iastLog.debug(e) } + return [] } - -module.exports = CommandSensitiveAnalyzer diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/header-sensitive-analyzer.js b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/header-sensitive-analyzer.js new file mode 100644 index 00000000000..73642c62e6d --- /dev/null +++ b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/header-sensitive-analyzer.js @@ -0,0 +1,20 @@ +'use strict' + +const { HEADER_NAME_VALUE_SEPARATOR } = require('../../constants') + +module.exports = function extractSensitiveRanges (evidence, namePattern, valuePattern) { + const evidenceValue = evidence.value + const sections = evidenceValue.split(HEADER_NAME_VALUE_SEPARATOR) + const headerName = sections[0] + const headerValue = sections.slice(1).join(HEADER_NAME_VALUE_SEPARATOR) + namePattern.lastIndex = 0 + valuePattern.lastIndex = 0 + if (namePattern.test(headerName) || valuePattern.test(headerValue)) { + return [{ + start: headerName.length + HEADER_NAME_VALUE_SEPARATOR.length, + end: evidenceValue.length + }] + } + + return [] +} diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/json-sensitive-analyzer.js b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/json-sensitive-analyzer.js index 
18efa8081b8..a3020c65c0a 100644 --- a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/json-sensitive-analyzer.js +++ b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/json-sensitive-analyzer.js @@ -2,15 +2,11 @@ const { stringifyWithRanges } = require('../../utils') -class JsonSensitiveAnalyzer { - extractSensitiveRanges (evidence) { - // expect object evidence - const { value, ranges, sensitiveRanges } = stringifyWithRanges(evidence.value, evidence.rangesToApply, true) - evidence.value = value - evidence.ranges = ranges +module.exports = function extractSensitiveRanges (evidence) { + // expect object evidence + const { value, ranges, sensitiveRanges } = stringifyWithRanges(evidence.value, evidence.rangesToApply, true) + evidence.value = value + evidence.ranges = ranges - return sensitiveRanges - } + return sensitiveRanges } - -module.exports = JsonSensitiveAnalyzer diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/ldap-sensitive-analyzer.js b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/ldap-sensitive-analyzer.js index 087d2c5f8f6..93497465afe 100644 --- a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/ldap-sensitive-analyzer.js +++ b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/ldap-sensitive-analyzer.js @@ -3,33 +3,26 @@ const iastLog = require('../../../iast-log') const LDAP_PATTERN = '\\(.*?(?:~=|=|<=|>=)(?[^)]+)\\)' +const pattern = new RegExp(LDAP_PATTERN, 'gmi') -class LdapSensitiveAnalyzer { - constructor () { - this._pattern = new RegExp(LDAP_PATTERN, 'gmi') - } - - extractSensitiveRanges (evidence) { - try { - this._pattern.lastIndex = 0 - const tokens = [] +module.exports = function extractSensitiveRanges (evidence) { + try { + pattern.lastIndex = 0 + const tokens = [] - let regexResult = this._pattern.exec(evidence.value) - while (regexResult != null) { - if (!regexResult.groups.LITERAL) continue - // Computing indices manually since NodeJs 12 does not support d flag on regular expressions - // TODO Get indices from group by adding d flag in regular expression - const start = regexResult.index + (regexResult[0].length - regexResult.groups.LITERAL.length - 1) - const end = start + regexResult.groups.LITERAL.length - tokens.push({ start, end }) - regexResult = this._pattern.exec(evidence.value) - } - return tokens - } catch (e) { - iastLog.debug(e) + let regexResult = pattern.exec(evidence.value) + while (regexResult != null) { + if (!regexResult.groups.LITERAL) continue + // Computing indices manually since NodeJs 12 does not support d flag on regular expressions + // TODO Get indices from group by adding d flag in regular expression + const start = regexResult.index + (regexResult[0].length - regexResult.groups.LITERAL.length - 1) + const end = start + regexResult.groups.LITERAL.length + tokens.push({ start, end }) + regexResult = pattern.exec(evidence.value) } - return [] + return tokens + } catch (e) { + iastLog.debug(e) } + return [] } - -module.exports = LdapSensitiveAnalyzer diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/sql-sensitive-analyzer.js b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/sql-sensitive-analyzer.js index 
bf23449cec7..88056f93168 100644 --- a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/sql-sensitive-analyzer.js +++ b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/sql-sensitive-analyzer.js @@ -23,96 +23,90 @@ const NUMERIC_LITERAL = })` const ORACLE_ESCAPED_LITERAL = 'q\'<.*?>\'|q\'\\(.*?\\)\'|q\'\\{.*?\\}\'|q\'\\[.*?\\]\'|q\'(?.).*?\\k\'' -class SqlSensitiveAnalyzer { - constructor () { - this._patterns = { - ANSI: new RegExp( // Default - [ - NUMERIC_LITERAL, - STRING_LITERAL, - LINE_COMMENT, - BLOCK_COMMENT - ].join('|'), - 'gmi' - ), - MYSQL: new RegExp( - [ - NUMERIC_LITERAL, - MYSQL_STRING_LITERAL, - LINE_COMMENT, - BLOCK_COMMENT - ].join('|'), - 'gmi' - ), - POSTGRES: new RegExp( - [ - NUMERIC_LITERAL, - POSTGRESQL_ESCAPED_LITERAL, - STRING_LITERAL, - LINE_COMMENT, - BLOCK_COMMENT - ].join('|'), - 'gmi' - ), - ORACLE: new RegExp([ - NUMERIC_LITERAL, - ORACLE_ESCAPED_LITERAL, - STRING_LITERAL, - LINE_COMMENT, - BLOCK_COMMENT - ].join('|'), - 'gmi') - } - this._patterns.SQLITE = this._patterns.MYSQL - this._patterns.MARIADB = this._patterns.MYSQL - } +const patterns = { + ANSI: new RegExp( // Default + [ + NUMERIC_LITERAL, + STRING_LITERAL, + LINE_COMMENT, + BLOCK_COMMENT + ].join('|'), + 'gmi' + ), + MYSQL: new RegExp( + [ + NUMERIC_LITERAL, + MYSQL_STRING_LITERAL, + LINE_COMMENT, + BLOCK_COMMENT + ].join('|'), + 'gmi' + ), + POSTGRES: new RegExp( + [ + NUMERIC_LITERAL, + POSTGRESQL_ESCAPED_LITERAL, + STRING_LITERAL, + LINE_COMMENT, + BLOCK_COMMENT + ].join('|'), + 'gmi' + ), + ORACLE: new RegExp([ + NUMERIC_LITERAL, + ORACLE_ESCAPED_LITERAL, + STRING_LITERAL, + LINE_COMMENT, + BLOCK_COMMENT + ].join('|'), + 'gmi') +} +patterns.SQLITE = patterns.MYSQL +patterns.MARIADB = patterns.MYSQL - extractSensitiveRanges (evidence) { - try { - let pattern = this._patterns[evidence.dialect] - if (!pattern) { - pattern = this._patterns['ANSI'] - } - pattern.lastIndex = 0 - const tokens = [] +module.exports = function extractSensitiveRanges (evidence) { + try { + let pattern = patterns[evidence.dialect] + if (!pattern) { + pattern = patterns['ANSI'] + } + pattern.lastIndex = 0 + const tokens = [] - let regexResult = pattern.exec(evidence.value) - while (regexResult != null) { - let start = regexResult.index - let end = regexResult.index + regexResult[0].length - const startChar = evidence.value.charAt(start) - if (startChar === '\'' || startChar === '"') { - start++ - end-- - } else if (end > start + 1) { - const nextChar = evidence.value.charAt(start + 1) - if (startChar === '/' && nextChar === '*') { - start += 2 - end -= 2 - } else if (startChar === '-' && startChar === nextChar) { - start += 2 - } else if (startChar.toLowerCase() === 'q' && nextChar === '\'') { - start += 3 - end -= 2 - } else if (startChar === '$') { - const match = regexResult[0] - const size = match.indexOf('$', 1) + 1 - if (size > 1) { - start += size - end -= size - } + let regexResult = pattern.exec(evidence.value) + while (regexResult != null) { + let start = regexResult.index + let end = regexResult.index + regexResult[0].length + const startChar = evidence.value.charAt(start) + if (startChar === '\'' || startChar === '"') { + start++ + end-- + } else if (end > start + 1) { + const nextChar = evidence.value.charAt(start + 1) + if (startChar === '/' && nextChar === '*') { + start += 2 + end -= 2 + } else if (startChar === '-' && startChar === nextChar) { + start += 2 + } else if (startChar.toLowerCase() === 'q' && 
nextChar === '\'') { + start += 3 + end -= 2 + } else if (startChar === '$') { + const match = regexResult[0] + const size = match.indexOf('$', 1) + 1 + if (size > 1) { + start += size + end -= size } } - - tokens.push({ start, end }) - regexResult = pattern.exec(evidence.value) } - return tokens - } catch (e) { - iastLog.debug(e) + + tokens.push({ start, end }) + regexResult = pattern.exec(evidence.value) } - return [] + return tokens + } catch (e) { + iastLog.debug(e) } + return [] } - -module.exports = SqlSensitiveAnalyzer diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/url-sensitive-analyzer.js b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/url-sensitive-analyzer.js index 414f1b684dc..6f43008d2c3 100644 --- a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/url-sensitive-analyzer.js +++ b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-analyzers/url-sensitive-analyzer.js @@ -4,46 +4,37 @@ const iastLog = require('../../../iast-log') const AUTHORITY = '^(?:[^:]+:)?//([^@]+)@' const QUERY_FRAGMENT = '[?#&]([^=&;]+)=([^?#&]+)' +const pattern = new RegExp([AUTHORITY, QUERY_FRAGMENT].join('|'), 'gmi') + +module.exports = function extractSensitiveRanges (evidence) { + try { + const ranges = [] + let regexResult = pattern.exec(evidence.value) + + while (regexResult != null) { + if (typeof regexResult[1] === 'string') { + // AUTHORITY regex match always ends by group + @ + // it means that the match last chars - 1 are always the group + const end = regexResult.index + (regexResult[0].length - 1) + const start = end - regexResult[1].length + ranges.push({ start, end }) + } -class UrlSensitiveAnalyzer { - constructor () { - this._pattern = new RegExp([AUTHORITY, QUERY_FRAGMENT].join('|'), 'gmi') - } - - extractSensitiveRanges (evidence) { - try { - const pattern = this._pattern - - const ranges = [] - let regexResult = pattern.exec(evidence.value) - - while (regexResult != null) { - if (typeof regexResult[1] === 'string') { - // AUTHORITY regex match always ends by group + @ - // it means that the match last chars - 1 are always the group - const end = regexResult.index + (regexResult[0].length - 1) - const start = end - regexResult[1].length - ranges.push({ start, end }) - } - - if (typeof regexResult[3] === 'string') { - // QUERY_FRAGMENT regex always ends with the group - // it means that the match last chars are always the group - const end = regexResult.index + regexResult[0].length - const start = end - regexResult[3].length - ranges.push({ start, end }) - } - - regexResult = pattern.exec(evidence.value) + if (typeof regexResult[3] === 'string') { + // QUERY_FRAGMENT regex always ends with the group + // it means that the match last chars are always the group + const end = regexResult.index + regexResult[0].length + const start = end - regexResult[3].length + ranges.push({ start, end }) } - return ranges - } catch (e) { - iastLog.debug(e) + regexResult = pattern.exec(evidence.value) } - return [] + return ranges + } catch (e) { + iastLog.debug(e) } -} -module.exports = UrlSensitiveAnalyzer + return [] +} diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler.js b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler.js index 4641876e934..b2ae03e4bb6 100644 --- 
a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler.js +++ b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler.js @@ -5,11 +5,12 @@ const vulnerabilities = require('../../vulnerabilities') const { contains, intersects, remove } = require('./range-utils') -const CommandSensitiveAnalyzer = require('./sensitive-analyzers/command-sensitive-analyzer') -const JsonSensitiveAnalyzer = require('./sensitive-analyzers/json-sensitive-analyzer') -const LdapSensitiveAnalyzer = require('./sensitive-analyzers/ldap-sensitive-analyzer') -const SqlSensitiveAnalyzer = require('./sensitive-analyzers/sql-sensitive-analyzer') -const UrlSensitiveAnalyzer = require('./sensitive-analyzers/url-sensitive-analyzer') +const commandSensitiveAnalyzer = require('./sensitive-analyzers/command-sensitive-analyzer') +const headerSensitiveAnalyzer = require('./sensitive-analyzers/header-sensitive-analyzer') +const jsonSensitiveAnalyzer = require('./sensitive-analyzers/json-sensitive-analyzer') +const ldapSensitiveAnalyzer = require('./sensitive-analyzers/ldap-sensitive-analyzer') +const sqlSensitiveAnalyzer = require('./sensitive-analyzers/sql-sensitive-analyzer') +const urlSensitiveAnalyzer = require('./sensitive-analyzers/url-sensitive-analyzer') const { DEFAULT_IAST_REDACTION_NAME_PATTERN, DEFAULT_IAST_REDACTION_VALUE_PATTERN } = require('./sensitive-regex') @@ -21,13 +22,15 @@ class SensitiveHandler { this._valuePattern = new RegExp(DEFAULT_IAST_REDACTION_VALUE_PATTERN, 'gmi') this._sensitiveAnalyzers = new Map() - this._sensitiveAnalyzers.set(vulnerabilities.COMMAND_INJECTION, new CommandSensitiveAnalyzer()) - this._sensitiveAnalyzers.set(vulnerabilities.NOSQL_MONGODB_INJECTION, new JsonSensitiveAnalyzer()) - this._sensitiveAnalyzers.set(vulnerabilities.LDAP_INJECTION, new LdapSensitiveAnalyzer()) - this._sensitiveAnalyzers.set(vulnerabilities.SQL_INJECTION, new SqlSensitiveAnalyzer()) - const urlSensitiveAnalyzer = new UrlSensitiveAnalyzer() + this._sensitiveAnalyzers.set(vulnerabilities.COMMAND_INJECTION, commandSensitiveAnalyzer) + this._sensitiveAnalyzers.set(vulnerabilities.NOSQL_MONGODB_INJECTION, jsonSensitiveAnalyzer) + this._sensitiveAnalyzers.set(vulnerabilities.LDAP_INJECTION, ldapSensitiveAnalyzer) + this._sensitiveAnalyzers.set(vulnerabilities.SQL_INJECTION, sqlSensitiveAnalyzer) this._sensitiveAnalyzers.set(vulnerabilities.SSRF, urlSensitiveAnalyzer) this._sensitiveAnalyzers.set(vulnerabilities.UNVALIDATED_REDIRECT, urlSensitiveAnalyzer) + this._sensitiveAnalyzers.set(vulnerabilities.HEADER_INJECTION, (evidence) => { + return headerSensitiveAnalyzer(evidence, this._namePattern, this._valuePattern) + }) } isSensibleName (name) { @@ -47,7 +50,7 @@ class SensitiveHandler { scrubEvidence (vulnerabilityType, evidence, sourcesIndexes, sources) { const sensitiveAnalyzer = this._sensitiveAnalyzers.get(vulnerabilityType) if (sensitiveAnalyzer) { - const sensitiveRanges = sensitiveAnalyzer.extractSensitiveRanges(evidence) + const sensitiveRanges = sensitiveAnalyzer(evidence) return this.toRedactedJson(evidence, sensitiveRanges, sourcesIndexes, sources) } return null diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities.js b/packages/dd-trace/src/appsec/iast/vulnerabilities.js index 1815bd4e201..a248b50c632 100644 --- a/packages/dd-trace/src/appsec/iast/vulnerabilities.js +++ b/packages/dd-trace/src/appsec/iast/vulnerabilities.js @@ -1,6 +1,7 @@ module.exports = { COMMAND_INJECTION: 'COMMAND_INJECTION', 
HARDCODED_SECRET: 'HARDCODED_SECRET', + HEADER_INJECTION: 'HEADER_INJECTION', HSTS_HEADER_MISSING: 'HSTS_HEADER_MISSING', INSECURE_COOKIE: 'INSECURE_COOKIE', LDAP_INJECTION: 'LDAP_INJECTION', diff --git a/packages/dd-trace/test/appsec/iast/analyzers/header-injection.express.plugin.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/header-injection.express.plugin.spec.js new file mode 100644 index 00000000000..3d825997654 --- /dev/null +++ b/packages/dd-trace/test/appsec/iast/analyzers/header-injection.express.plugin.spec.js @@ -0,0 +1,228 @@ +'use strict' + +const axios = require('axios') +const fs = require('fs') +const os = require('os') +const path = require('path') +const { prepareTestServerForIastInExpress } = require('../utils') + +describe('Header injection vulnerability', () => { + let setHeaderFunction + const setHeaderFunctionFilename = 'set-header-function.js' + const setHeaderFunctionPath = path.join(os.tmpdir(), setHeaderFunctionFilename) + + before(() => { + fs.copyFileSync(path.join(__dirname, 'resources', 'set-header-function.js'), setHeaderFunctionPath) + setHeaderFunction = require(setHeaderFunctionPath).setHeader + }) + + after(() => { + fs.unlinkSync(setHeaderFunctionPath) + }) + + withVersions('express', 'express', version => { + prepareTestServerForIastInExpress('in express', version, + (testThatRequestHasVulnerability, testThatRequestHasNoVulnerability) => { + testThatRequestHasVulnerability({ + fn: (req, res) => { + setHeaderFunction('custom', req.body.test, res) + }, + vulnerability: 'HEADER_INJECTION', + occurrencesAndLocation: { + occurrences: 1, + location: { + path: setHeaderFunctionFilename, + line: 4 + } + }, + cb: (headerInjectionVulnerabilities) => { + const evidenceString = headerInjectionVulnerabilities[0].evidence.valueParts + .map(part => part.value).join('') + expect(evidenceString).to.be.equal('custom: value') + }, + makeRequest: (done, config) => { + return axios.post(`http://localhost:${config.port}/`, { + test: 'value' + }).catch(done) + } + }) + + testThatRequestHasVulnerability({ + testDescription: 'should have HEADER_INJECTION vulnerability ' + + 'when the header value is an array with tainted string', + fn: (req, res) => { + setHeaderFunction('custom', ['not_tainted', req.body.test], res) + }, + vulnerability: 'HEADER_INJECTION', + occurrencesAndLocation: { + occurrences: 1, + location: { + path: setHeaderFunctionFilename, + line: 4 + } + }, + cb: (headerInjectionVulnerabilities) => { + const evidenceString = headerInjectionVulnerabilities[0].evidence.valueParts + .map(part => part.value).join('') + + expect(evidenceString).to.be.equal('custom: value') + }, + makeRequest: (done, config) => { + return axios.post(`http://localhost:${config.port}/`, { + test: 'value' + }).catch(done) + } + }) + + testThatRequestHasNoVulnerability({ + testDescription: 'should not have HEADER_INJECTION vulnerability ' + + 'when the header value an array without tainteds', + fn: (req, res) => { + setHeaderFunction('custom', ['not tainted string 1', 'not tainted string 2'], res) + }, + vulnerability: 'HEADER_INJECTION' + }) + + testThatRequestHasNoVulnerability({ + testDescription: 'should not have HEADER_INJECTION vulnerability when the header value is not tainted', + fn: (req, res) => { + setHeaderFunction('custom', 'not tainted string', res) + }, + vulnerability: 'HEADER_INJECTION' + }) + + testThatRequestHasNoVulnerability({ + testDescription: 'should not have HEADER_INJECTION vulnerability when the header is "location"', + fn: (req, res) => { + 
setHeaderFunction('location', req.body.test, res) + }, + vulnerability: 'HEADER_INJECTION', + makeRequest: (done, config) => { + return axios.post(`http://localhost:${config.port}/`, { + test: 'https://www.datadoghq.com' + }).catch(done) + } + }) + + testThatRequestHasNoVulnerability({ + testDescription: 'should not have HEADER_INJECTION vulnerability when the header is "Sec-WebSocket-Location"', + fn: (req, res) => { + setHeaderFunction('Sec-WebSocket-Location', req.body.test, res) + }, + vulnerability: 'HEADER_INJECTION', + makeRequest: (done, config) => { + return axios.post(`http://localhost:${config.port}/`, { + test: 'https://www.datadoghq.com' + }).catch(done) + } + }) + + testThatRequestHasNoVulnerability({ + testDescription: 'should not have HEADER_INJECTION vulnerability when the header is "Sec-WebSocket-Accept"', + fn: (req, res) => { + setHeaderFunction('Sec-WebSocket-Accept', req.body.test, res) + }, + vulnerability: 'HEADER_INJECTION', + makeRequest: (done, config) => { + return axios.post(`http://localhost:${config.port}/`, { + test: 'https://www.datadoghq.com' + }).catch(done) + } + }) + + testThatRequestHasNoVulnerability({ + testDescription: 'should not have HEADER_INJECTION vulnerability when the header is "Upgrade"', + fn: (req, res) => { + setHeaderFunction('Upgrade', req.body.test, res) + }, + vulnerability: 'HEADER_INJECTION', + makeRequest: (done, config) => { + return axios.post(`http://localhost:${config.port}/`, { + test: 'https://www.datadoghq.com' + }).catch(done) + } + }) + + testThatRequestHasNoVulnerability({ + testDescription: 'should not have HEADER_INJECTION vulnerability when the header is "Connection"', + fn: (req, res) => { + setHeaderFunction('Upgrade', req.body.test, res) + }, + vulnerability: 'HEADER_INJECTION', + makeRequest: (done, config) => { + return axios.post(`http://localhost:${config.port}/`, { + test: 'https://www.datadoghq.com' + }).catch(done) + } + }) + + testThatRequestHasNoVulnerability({ + testDescription: 'should not have HEADER_INJECTION vulnerability ' + + 'when the header is "access-control-allow-origin" and the origin is a header', + fn: (req, res) => { + setHeaderFunction('access-control-allow-origin', req.headers.testheader, res) + }, + vulnerability: 'HEADER_INJECTION', + makeRequest: (done, config) => { + return axios.get(`http://localhost:${config.port}/`, { + headers: { + testheader: 'headerValue' + } + }).catch(done) + } + }) + + testThatRequestHasVulnerability({ + testDescription: 'should have HEADER_INJECTION vulnerability ' + + 'when the header is "access-control-allow-origin" and the origin is not a header', + fn: (req, res) => { + setHeaderFunction('access-control-allow-origin', req.body.test, res) + }, + vulnerability: 'HEADER_INJECTION', + makeRequest: (done, config) => { + return axios.post(`http://localhost:${config.port}/`, { + test: 'https://www.datadoghq.com' + }, { + headers: { + testheader: 'headerValue' + } + }).catch(done) + } + }) + + testThatRequestHasNoVulnerability({ + testDescription: 'should not have HEADER_INJECTION vulnerability ' + + 'when the header is "set-cookie" and the origin is a cookie', + fn: (req, res) => { + setHeaderFunction('set-cookie', req.cookies.cookie1, res) + }, + vulnerability: 'HEADER_INJECTION', + makeRequest: (done, config) => { + return axios.get(`http://localhost:${config.port}/`, { + headers: { + Cookie: 'cookie1=value' + } + }).catch(done) + } + }) + + testThatRequestHasVulnerability({ + testDescription: 'should have HEADER_INJECTION vulnerability when ' + + 'the header 
is "access-control-allow-origin" and the origin is not a header', + fn: (req, res) => { + setHeaderFunction('set-cookie', req.body.test, res) + }, + vulnerability: 'HEADER_INJECTION', + makeRequest: (done, config) => { + return axios.post(`http://localhost:${config.port}/`, { + test: 'key=value' + }, { + headers: { + testheader: 'headerValue' + } + }).catch(done) + } + }) + }) + }) +}) diff --git a/packages/dd-trace/test/appsec/iast/analyzers/resources/set-header-function.js b/packages/dd-trace/test/appsec/iast/analyzers/resources/set-header-function.js new file mode 100644 index 00000000000..f2e4e1d4ef2 --- /dev/null +++ b/packages/dd-trace/test/appsec/iast/analyzers/resources/set-header-function.js @@ -0,0 +1,7 @@ +'use strict' + +function setHeader (name, value, res) { + res.setHeader(name, value) +} + +module.exports = { setHeader } diff --git a/packages/dd-trace/test/appsec/iast/utils.js b/packages/dd-trace/test/appsec/iast/utils.js index b2847d02db3..b48404e8a93 100644 --- a/packages/dd-trace/test/appsec/iast/utils.js +++ b/packages/dd-trace/test/appsec/iast/utils.js @@ -298,6 +298,13 @@ function prepareTestServerForIastInExpress (description, expressVersion, loadMid if (loadMiddlewares) loadMiddlewares(expressApp) expressApp.use(bodyParser.json()) + try { + const cookieParser = require(`../../../../../versions/cookie-parser`).get() + expressApp.use(cookieParser()) + } catch (e) { + // do nothing, in some scenarios we don't have cookie-parser dependency available, and we don't need + // it in all the iast tests + } expressApp.all('/', listener) getPort().then(newPort => { @@ -320,23 +327,23 @@ function prepareTestServerForIastInExpress (description, expressVersion, loadMid return agent.close({ ritmReset: false }) }) - function testThatRequestHasVulnerability (fn, vulnerability, occurrences, cb, makeRequest) { - let testDescription = `should have ${vulnerability} vulnerability` + function testThatRequestHasVulnerability (fn, vulnerability, occurrencesAndLocation, cb, makeRequest) { + let testDescription if (typeof fn === 'object') { const obj = fn fn = obj.fn vulnerability = obj.vulnerability - occurrences = obj.occurrences + occurrencesAndLocation = obj.occurrencesAndLocation || obj.occurrences cb = obj.cb makeRequest = obj.makeRequest testDescription = obj.testDescription || testDescription } - + testDescription = testDescription || `should have ${vulnerability} vulnerability` it(testDescription, function (done) { this.timeout(5000) app = fn - checkVulnerabilityInRequest(vulnerability, occurrences, cb, makeRequest, config, done) + checkVulnerabilityInRequest(vulnerability, occurrencesAndLocation, cb, makeRequest, config, done) }) } diff --git a/packages/dd-trace/test/appsec/iast/vulnerability-formatter/index.spec.js b/packages/dd-trace/test/appsec/iast/vulnerability-formatter/index.spec.js index 81fa0180a6d..f005bdd7306 100644 --- a/packages/dd-trace/test/appsec/iast/vulnerability-formatter/index.spec.js +++ b/packages/dd-trace/test/appsec/iast/vulnerability-formatter/index.spec.js @@ -63,16 +63,17 @@ function extractTestParameters (testCase) { describe('Vulnerability formatter', () => { describe('Vulnerability redaction', () => { - suite.filter(testCase => testCase.type === 'VULNERABILITIES').forEach((testCase) => { - if (!testCase.parameters) { - doTest(testCase) - } else { - const testsParameters = extractTestParameters(testCase) - testsParameters.forEach(testParameters => { - doTest(testCase, testParameters) - }) - } - }) + suite.filter(testCase => testCase.type === 
'VULNERABILITIES' && testCase.input[0]?.type !== 'XSS') + .forEach((testCase) => { + if (!testCase.parameters) { + doTest(testCase) + } else { + const testsParameters = extractTestParameters(testCase) + testsParameters.forEach(testParameters => { + doTest(testCase, testParameters) + }) + } + }) }) describe('toJson', () => { diff --git a/packages/dd-trace/test/appsec/iast/vulnerability-formatter/resources/evidence-redaction-suite.json b/packages/dd-trace/test/appsec/iast/vulnerability-formatter/resources/evidence-redaction-suite.json index 271013c3b5b..4f63b3d1f44 100644 --- a/packages/dd-trace/test/appsec/iast/vulnerability-formatter/resources/evidence-redaction-suite.json +++ b/packages/dd-trace/test/appsec/iast/vulnerability-formatter/resources/evidence-redaction-suite.json @@ -2734,41 +2734,953 @@ ] } }, + { + "type": "VULNERABILITIES", + "description": "Consecutive ranges - at the beginning", + "input": [ + { + "type": "UNVALIDATED_REDIRECT", + "evidence": { + "value": "https://user:password@datadoghq.com:443/api/v1/test/123/?param1=pone¶m2=ptwo#fragment1=fone&fragment2=ftwo", + "ranges": [ + { + "start": 0, + "end": 4, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "protocol", + "parameterValue": "http" + } + }, + { + "start": 4, + "end": 5, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "secure", + "parameterValue": "s" + } + }, + { + "start": 22, + "end": 35, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "host", + "parameterValue": "datadoghq.com" + } + } + ] + } + } + ], + "expected": { + "sources": [ + { + "origin": "http.request.parameter", + "name": "protocol", + "value": "http" + }, + { + "origin": "http.request.parameter", + "name": "secure", + "value": "s" + }, + { + "origin": "http.request.parameter", + "name": "host", + "value": "datadoghq.com" + } + ], + "vulnerabilities": [ + { + "type": "UNVALIDATED_REDIRECT", + "evidence": { + "valueParts": [ + { + "source": 0, + "value": "http" + }, + { + "source": 1, + "value": "s" + }, + { + "value": "://" + }, + { + "redacted": true + }, + { + "value": "@" + }, + { + "source": 2, + "value": "datadoghq.com" + }, + { + "value": ":443/api/v1/test/123/?param1=" + }, + { + "redacted": true + }, + { + "value": "¶m2=" + }, + { + "redacted": true + }, + { + "value": "#fragment1=" + }, + { + "redacted": true + }, + { + "value": "&fragment2=" + }, + { + "redacted": true + } + ] + } + } + ] + } + }, + { + "type": "VULNERABILITIES", + "description": "Tainted range based redaction ", + "input": [ + { + "type": "XSS", + "evidence": { + "value": "this could be a super long text, so we need to reduce it before send it to the backend. This redaction strategy applies to XSS vulnerability but can be extended to future ones", + "ranges": [ + { + "start": 123, + "end": 126, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "type", + "parameterValue": "XSS" + } + } + ] + } + } + ], + "expected": { + "sources": [ + { + "origin": "http.request.parameter", + "name": "type", + "value": "XSS" + } + ], + "vulnerabilities": [ + { + "type": "XSS", + "evidence": { + "valueParts": [ + { + "redacted": true + }, + { + "source": 0, + "value": "XSS" + }, + { + "redacted": true + } + ] + } + } + ] + } + }, + { + "type": "VULNERABILITIES", + "description": "Tainted range based redaction - with redactable source ", + "input": [ + { + "type": "XSS", + "evidence": { + "value": "this could be a super long text, so we need to reduce it before send it to the backend. 
This redaction strategy applies to XSS vulnerability but can be extended to future ones", + "ranges": [ + { + "start": 123, + "end": 126, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "password", + "parameterValue": "XSS" + } + } + ] + } + } + ], + "expected": { + "sources": [ + { + "origin": "http.request.parameter", + "name": "password", + "redacted": true, + "pattern": "abc" + } + ], + "vulnerabilities": [ + { + "type": "XSS", + "evidence": { + "valueParts": [ + { + "redacted": true + }, + { + "source": 0, + "redacted": true, + "pattern": "abc" + }, + { + "redacted": true + } + ] + } + } + ] + } + }, + { + "type": "VULNERABILITIES", + "description": "Tainted range based redaction - with null source ", + "input": [ + { + "type": "XSS", + "evidence": { + "value": "this could be a super long text, so we need to reduce it before send it to the backend. This redaction strategy applies to XSS vulnerability but can be extended to future ones", + "ranges": [ + { + "start": 123, + "end": 126, + "iinfo": { + "type": "http.request.body" + } + } + ] + } + } + ], + "expected": { + "sources": [ + { + "origin": "http.request.body" + } + ], + "vulnerabilities": [ + { + "type": "XSS", + "evidence": { + "valueParts": [ + { + "redacted": true + }, + { + "source": 0, + "value": "XSS" + }, + { + "redacted": true + } + ] + } + } + ] + } + }, + { + "type": "VULNERABILITIES", + "description": "Tainted range based redaction - multiple ranges", + "input": [ + { + "type": "XSS", + "evidence": { + "value": "this could be a super long text, so we need to reduce it before send it to the backend. This redaction strategy applies to XSS vulnerability but can be extended to future ones", + "ranges": [ + { + "start": 16, + "end": 26, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "text", + "parameterValue": "super long" + } + }, + { + "start": 123, + "end": 126, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "type", + "parameterValue": "XSS" + } + } + ] + } + } + ], + "expected": { + "sources": [ + { + "origin": "http.request.parameter", + "name": "text", + "value": "super long" + }, + { + "origin": "http.request.parameter", + "name": "type", + "value": "XSS" + } + ], + "vulnerabilities": [ + { + "type": "XSS", + "evidence": { + "valueParts": [ + { + "redacted": true + }, + { + "source": 0, + "value": "super long" + }, + { + "redacted": true + }, + { + "source": 1, + "value": "XSS" + }, + { + "redacted": true + } + ] + } + } + ] + } + }, + { + "type": "VULNERABILITIES", + "description": "Tainted range based redaction - first range at the beginning ", + "input": [ + { + "type": "XSS", + "evidence": { + "value": "this could be a super long text, so we need to reduce it before send it to the backend. 
This redaction strategy applies to XSS vulnerability but can be extended to future ones", + "ranges": [ + { + "start": 0, + "end": 4, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "text", + "parameterValue": "this" + } + }, + { + "start": 123, + "end": 126, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "type", + "parameterValue": "XSS" + } + } + ] + } + } + ], + "expected": { + "sources": [ + { + "origin": "http.request.parameter", + "name": "text", + "value": "this" + }, + { + "origin": "http.request.parameter", + "name": "type", + "value": "XSS" + } + ], + "vulnerabilities": [ + { + "type": "XSS", + "evidence": { + "valueParts": [ + { + "source": 0, + "value": "this" + }, + { + "redacted": true + }, + { + "source": 1, + "value": "XSS" + }, + { + "redacted": true + } + ] + } + } + ] + } + }, + { + "type": "VULNERABILITIES", + "description": "Tainted range based redaction - last range at the end ", + "input": [ + { + "type": "XSS", + "evidence": { + "value": "this could be a super long text, so we need to reduce it before send it to the backend. This redaction strategy applies to XSS", + "ranges": [ + { + "start": 0, + "end": 4, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "text", + "parameterValue": "this" + } + }, + { + "start": 123, + "end": 126, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "type", + "parameterValue": "XSS" + } + } + ] + } + } + ], + "expected": { + "sources": [ + { + "origin": "http.request.parameter", + "name": "text", + "value": "this" + }, + { + "origin": "http.request.parameter", + "name": "type", + "value": "XSS" + } + ], + "vulnerabilities": [ + { + "type": "XSS", + "evidence": { + "valueParts": [ + { + "source": 0, + "value": "this" + }, + { + "redacted": true + }, + { + "source": 1, + "value": "XSS" + } + ] + } + } + ] + } + }, + { + "type": "VULNERABILITIES", + "description": "Tainted range based redaction - whole text ", + "input": [ + { + "type": "XSS", + "evidence": { + "value": "this could be a super long text, so we need to reduce it before send it to the backend. This redaction strategy applies to XSS", + "ranges": [ + { + "start": 0, + "end": 126, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "text", + "parameterValue": "this could be a super long text, so we need to reduce it before send it to the backend. This redaction strategy applies to XSS" + } + } + ] + } + } + ], + "expected": { + "sources": [ + { + "origin": "http.request.parameter", + "name": "text", + "value": "this could be a super long text, so we need to reduce it before send it to the backend. This redaction strategy applies to XSS" + } + ], + "vulnerabilities": [ + { + "type": "XSS", + "evidence": { + "valueParts": [ + { + "source": 0, + "value": "this could be a super long text, so we need to reduce it before send it to the backend. 
This redaction strategy applies to XSS" + } + ] + } + } + ] + } + }, { "type": "VULNERABILITIES", "description": "Mongodb json query with sensitive source", "input": [ { - "type": "NOSQL_MONGODB_INJECTION", + "type": "NOSQL_MONGODB_INJECTION", + "evidence": { + "value": { + "password": "1234" + }, + "ranges": [ + { + "start": 0, + "end": 4, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "password", + "parameterValue": "1234" + } + } + ], + "rangesToApply": { + "password": [ + { + "start": 0, + "end": 4, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "password", + "parameterValue": "1234" + } + } + ] + } + } + } + ], + "expected": { + "sources": [ + { + "origin": "http.request.parameter", + "name": "password", + "redacted": true, + "pattern": "abcd" + } + ], + "vulnerabilities": [ + { + "type": "NOSQL_MONGODB_INJECTION", + "evidence": { + "valueParts": [ + { + "value": "{\n \"password\": \"" + }, + { + "source": 0, + "redacted": true, + "pattern": "abcd" + }, + { + "value": "\"\n}" + } + ] + } + } + ] + } + }, + { + "type": "VULNERABILITIES", + "description": "Mongodb json query with non sensitive source", + "input": [ + { + "type": "NOSQL_MONGODB_INJECTION", + "evidence": { + "value": { + "username": "user" + }, + "ranges": [ + { + "start": 0, + "end": 4, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "username", + "parameterValue": "user" + } + } + ], + "rangesToApply": { + "username": [ + { + "start": 0, + "end": 4, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "username", + "parameterValue": "user" + } + } + ] + } + } + } + ], + "expected": { + "sources": [ + { + "origin": "http.request.parameter", + "name": "username", + "redacted": true, + "pattern": "abcd" + } + ], + "vulnerabilities": [ + { + "type": "NOSQL_MONGODB_INJECTION", + "evidence": { + "valueParts": [ + { + "value": "{\n \"username\": \"" + }, + { + "source": 0, + "redacted": true, + "pattern": "abcd" + }, + { + "value": "\"\n}" + } + ] + } + } + ] + } + }, + { + "type": "VULNERABILITIES", + "description": "Mongodb json query with partial non sensitive source", + "input": [ + { + "type": "NOSQL_MONGODB_INJECTION", + "evidence": { + "value": { + "username": "user" + }, + "ranges": [ + { + "start": 0, + "end": 4, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "username", + "parameterValue": "PREFIX_user" + } + } + ], + "rangesToApply": { + "username": [ + { + "start": 0, + "end": 4, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "username", + "parameterValue": "PREFIX_user" + } + } + ] + } + } + } + ], + "expected": { + "sources": [ + { + "origin": "http.request.parameter", + "name": "username", + "redacted": true, + "pattern": "abcdefghijk" + } + ], + "vulnerabilities": [ + { + "type": "NOSQL_MONGODB_INJECTION", + "evidence": { + "valueParts": [ + { + "value": "{\n \"username\": \"" + }, + { + "source": 0, + "redacted": true, + "pattern": "hijk" + }, + { + "value": "\"\n}" + } + ] + } + } + ] + } + }, + { + "type": "VULNERABILITIES", + "description": "Mongodb json query with non sensitive source and other fields", + "input": [ + { + "type": "NOSQL_MONGODB_INJECTION", + "evidence": { + "value": { + "username": "user", + "secret": "SECRET_VALUE" + }, + "ranges": [ + { + "start": 0, + "end": 4, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "username", + "parameterValue": "user" + } + } + ], + "rangesToApply": { + "username": [ + { + "start": 0, + "end": 4, + "iinfo": { + "type": 
"http.request.parameter", + "parameterName": "username", + "parameterValue": "user" + } + } + ] + } + } + } + ], + "expected": { + "sources": [ + { + "origin": "http.request.parameter", + "name": "username", + "redacted": true, + "pattern": "abcd" + } + ], + "vulnerabilities": [ + { + "type": "NOSQL_MONGODB_INJECTION", + "evidence": { + "valueParts": [ + { + "value": "{\n \"username\": \"" + }, + { + "source": 0, + "redacted": true, + "pattern": "abcd" + }, + { + "value": "\",\n \"secret\": \"" + }, + { + "redacted": true + }, + { + "value": "\"\n}" + } + ] + } + } + ] + } + }, + { + "type": "VULNERABILITIES", + "description": "Mongodb json query with sensitive value in a key", + "input": [ + { + "type": "NOSQL_MONGODB_INJECTION", + "evidence": { + "value": { + "username": "user", + "token_usage": { + "bearer zss8dR9QP81A": 10 + } + }, + "ranges": [ + { + "start": 0, + "end": 4, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "username", + "parameterValue": "user" + } + } + ], + "rangesToApply": { + "username": [ + { + "start": 0, + "end": 4, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "username", + "parameterValue": "user" + } + } + ] + } + } + } + ], + "expected": { + "sources": [ + { + "origin": "http.request.parameter", + "name": "username", + "redacted": true, + "pattern": "abcd" + } + ], + "vulnerabilities": [ + { + "type": "NOSQL_MONGODB_INJECTION", + "evidence": { + "valueParts": [ + { + "value": "{\n \"username\": \"" + }, + { + "source": 0, + "redacted": true, + "pattern": "abcd" + }, + { + "value": "\",\n \"token_usage\": {\n \"" + }, + { + "redacted": true + }, + { + "value": "\": " + }, + { + "redacted": true + }, + { + "value": "\n }\n}" + } + ] + } + } + ] + } + }, + { + "type": "VULNERABILITIES", + "description": "Header injection without sensitive data", + "input": [ + { + "type": "HEADER_INJECTION", "evidence": { - "value": { - "password": "1234" - }, + "value": "custom: text", "ranges": [ { - "start": 0, - "end": 4, + "start": 8, + "end": 12, "iinfo": { "type": "http.request.parameter", - "parameterName": "password", - "parameterValue": "1234" + "parameterName": "param", + "parameterValue": "text" } } - ], - "rangesToApply": { - "password": [ + ] + } + } + ], + "expected": { + "sources": [ + { + "origin": "http.request.parameter", + "name": "param", + "value": "text" + } + ], + "vulnerabilities": [ + { + "type": "HEADER_INJECTION", + "evidence": { + "valueParts": [ { - "start": 0, - "end": 4, - "iinfo": { - "type": "http.request.parameter", - "parameterName": "password", - "parameterValue": "1234" - } + "value": "custom: " + }, + { + "source": 0, + "value": "text" } ] } } + ] + } + }, + { + "type": "VULNERABILITIES", + "description": "Header injection with only sensitive data from tainted", + "input": [ + { + "type": "HEADER_INJECTION", + "evidence": { + "value": "custom: pass", + "ranges": [ + { + "start": 8, + "end": 12, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "password", + "parameterValue": "pass" + } + } + ] + } } ], "expected": { @@ -2782,19 +3694,16 @@ ], "vulnerabilities": [ { - "type": "NOSQL_MONGODB_INJECTION", + "type": "HEADER_INJECTION", "evidence": { "valueParts": [ { - "value": "{\n \"password\": \"" + "value": "custom: " }, { "source": 0, "redacted": true, "pattern": "abcd" - }, - { - "value": "\"\n}" } ] } @@ -2804,38 +3713,23 @@ }, { "type": "VULNERABILITIES", - "description": "Mongodb json query with non sensitive source", + "description": "Header injection with partial sensitive 
data from tainted", "input": [ { - "type": "NOSQL_MONGODB_INJECTION", + "type": "HEADER_INJECTION", "evidence": { - "value": { - "username": "user" - }, + "value": "custom: this is pass", "ranges": [ { - "start": 0, - "end": 4, + "start": 16, + "end": 20, "iinfo": { "type": "http.request.parameter", - "parameterName": "username", - "parameterValue": "user" + "parameterName": "password", + "parameterValue": "pass" } } - ], - "rangesToApply": { - "username": [ - { - "start": 0, - "end": 4, - "iinfo": { - "type": "http.request.parameter", - "parameterName": "username", - "parameterValue": "user" - } - } - ] - } + ] } } ], @@ -2843,26 +3737,23 @@ "sources": [ { "origin": "http.request.parameter", - "name": "username", + "name": "password", "redacted": true, "pattern": "abcd" } ], "vulnerabilities": [ { - "type": "NOSQL_MONGODB_INJECTION", + "type": "HEADER_INJECTION", "evidence": { "valueParts": [ { - "value": "{\n \"username\": \"" + "value": "custom: this is " }, { "source": 0, "redacted": true, "pattern": "abcd" - }, - { - "value": "\"\n}" } ] } @@ -2872,38 +3763,23 @@ }, { "type": "VULNERABILITIES", - "description": "Mongodb json query with partial non sensitive source", + "description": "Header injection with sensitive data from header name", "input": [ { - "type": "NOSQL_MONGODB_INJECTION", + "type": "HEADER_INJECTION", "evidence": { - "value": { - "username": "user" - }, + "value": "password: text", "ranges": [ { - "start": 0, - "end": 4, + "start": 10, + "end": 14, "iinfo": { "type": "http.request.parameter", - "parameterName": "username", - "parameterValue": "PREFIX_user" + "parameterName": "param", + "parameterValue": "text" } } - ], - "rangesToApply": { - "username": [ - { - "start": 0, - "end": 4, - "iinfo": { - "type": "http.request.parameter", - "parameterName": "username", - "parameterValue": "PREFIX_user" - } - } - ] - } + ] } } ], @@ -2911,26 +3787,23 @@ "sources": [ { "origin": "http.request.parameter", - "name": "username", + "name": "param", "redacted": true, - "pattern": "abcdefghijk" + "pattern": "abcd" } ], "vulnerabilities": [ { - "type": "NOSQL_MONGODB_INJECTION", + "type": "HEADER_INJECTION", "evidence": { "valueParts": [ { - "value": "{\n \"username\": \"" + "value": "password: " }, { "source": 0, "redacted": true, - "pattern": "hijk" - }, - { - "value": "\"\n}" + "pattern": "abcd" } ] } @@ -2940,39 +3813,23 @@ }, { "type": "VULNERABILITIES", - "description": "Mongodb json query with non sensitive source and other fields", + "description": "Header injection with sensitive data from header value", "input": [ { - "type": "NOSQL_MONGODB_INJECTION", + "type": "HEADER_INJECTION", "evidence": { - "value": { - "username": "user", - "secret": "SECRET_VALUE" - }, + "value": "custom: bearer 1234123", "ranges": [ { - "start": 0, - "end": 4, + "start": 15, + "end": 22, "iinfo": { "type": "http.request.parameter", - "parameterName": "username", - "parameterValue": "user" + "parameterName": "param", + "parameterValue": "1234123" } } - ], - "rangesToApply": { - "username": [ - { - "start": 0, - "end": 4, - "iinfo": { - "type": "http.request.parameter", - "parameterName": "username", - "parameterValue": "user" - } - } - ] - } + ] } } ], @@ -2980,32 +3837,26 @@ "sources": [ { "origin": "http.request.parameter", - "name": "username", + "name": "param", "redacted": true, - "pattern": "abcd" + "pattern": "abcdefg" } ], "vulnerabilities": [ { - "type": "NOSQL_MONGODB_INJECTION", + "type": "HEADER_INJECTION", "evidence": { "valueParts": [ { - "value": "{\n \"username\": \"" - }, - { - 
"source": 0, - "redacted": true, - "pattern": "abcd" - }, - { - "value": "\",\n \"secret\": \"" + "value": "custom: " }, { "redacted": true }, { - "value": "\"\n}" + "source": 0, + "redacted": true, + "pattern": "abcdefg" } ] } @@ -3015,41 +3866,23 @@ }, { "type": "VULNERABILITIES", - "description": "Mongodb json query with sensitive value in a key", + "description": "Header injection with sensitive data from header and tainted", "input": [ { - "type": "NOSQL_MONGODB_INJECTION", + "type": "HEADER_INJECTION", "evidence": { - "value": { - "username": "user", - "token_usage": { - "bearer zss8dR9QP81A": 10 - } - }, + "value": "password: this is pass", "ranges": [ { - "start": 0, - "end": 4, + "start": 18, + "end": 22, "iinfo": { "type": "http.request.parameter", - "parameterName": "username", - "parameterValue": "user" + "parameterName": "password", + "parameterValue": "pass" } } - ], - "rangesToApply": { - "username": [ - { - "start": 0, - "end": 4, - "iinfo": { - "type": "http.request.parameter", - "parameterName": "username", - "parameterValue": "user" - } - } - ] - } + ] } } ], @@ -3057,38 +3890,79 @@ "sources": [ { "origin": "http.request.parameter", - "name": "username", + "name": "password", "redacted": true, "pattern": "abcd" } ], "vulnerabilities": [ { - "type": "NOSQL_MONGODB_INJECTION", + "type": "HEADER_INJECTION", "evidence": { "valueParts": [ { - "value": "{\n \"username\": \"" + "value": "password: " + }, + { + "redacted": true }, { "source": 0, "redacted": true, "pattern": "abcd" - }, - { - "value": "\",\n \"token_usage\": {\n \"" - }, - { - "redacted": true - }, + } + ] + } + } + ] + } + }, + { + "type": "VULNERABILITIES", + "description": "Header injection with sensitive data from header and tainted (source does not match)", + "input": [ + { + "type": "HEADER_INJECTION", + "evidence": { + "value": "password: this is key word", + "ranges": [ + { + "start": 18, + "end": 26, + "iinfo": { + "type": "http.request.parameter", + "parameterName": "password", + "parameterValue": "key%20word" + } + } + ] + } + } + ], + "expected": { + "sources": [ + { + "origin": "http.request.parameter", + "name": "password", + "redacted": true, + "pattern": "abcdefghij" + } + ], + "vulnerabilities": [ + { + "type": "HEADER_INJECTION", + "evidence": { + "valueParts": [ { - "value": "\": " + "value": "password: " }, { "redacted": true }, { - "value": "\n }\n}" + "source": 0, + "redacted": true, + "pattern": "********" } ] } From 19a3a3ac58d61e67f2185bae77f53fc4dc969f8b Mon Sep 17 00:00:00 2001 From: Nicolas Savoire Date: Wed, 6 Dec 2023 09:43:01 +0100 Subject: [PATCH 096/147] Add integration test for OOM in worker (#3843) Bump pprof-nodejs version and add integration test for OOM in worker: Worker should exit but process should not abort. 
--- integration-tests/profiler.spec.js | 12 +++++++--- integration-tests/profiler/oom.js | 35 ++++++++++++++++++++---------- package.json | 2 +- yarn.lock | 8 +++---- 4 files changed, 38 insertions(+), 19 deletions(-) diff --git a/integration-tests/profiler.spec.js b/integration-tests/profiler.spec.js index 90482df58a4..654d89a25ce 100644 --- a/integration-tests/profiler.spec.js +++ b/integration-tests/profiler.spec.js @@ -252,8 +252,6 @@ describe('profiler', () => { oomEnv = { DD_TRACE_AGENT_PORT: agent.port, DD_PROFILING_ENABLED: 1, - DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED: 1, - DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES: 'process', DD_TRACE_DEBUG: 1, DD_TRACE_LOG_LEVEL: 'warn' } @@ -284,7 +282,7 @@ describe('profiler', () => { return checkProfiles(agent, proc, timeout, ['space'], true) }) - it('sends a heap profile on OOM with external process and ends successfully', async () => { + it('sends a heap profile on OOM with external process and exits successfully', async () => { proc = fork(oomTestFile, { cwd, execArgv: oomExecArgv, @@ -324,5 +322,13 @@ describe('profiler', () => { }) return checkProfiles(agent, proc, timeout, ['space'], true, 2) }) + + it('sends a heap profile on OOM in worker thread and exits successfully', async () => { + proc = fork(oomTestFile, [1, 50], { + cwd, + env: { ...oomEnv, DD_PROFILING_WALLTIME_ENABLED: 0 } + }) + return checkProfiles(agent, proc, timeout, ['space'], false, 2) + }) }) }) diff --git a/integration-tests/profiler/oom.js b/integration-tests/profiler/oom.js index 043201a5f24..eb7c9e22991 100644 --- a/integration-tests/profiler/oom.js +++ b/integration-tests/profiler/oom.js @@ -1,16 +1,32 @@ 'use strict' +/* eslint-disable no-console */ + require('dd-trace').init() -const { Worker, isMainThread } = require('worker_threads') +const { Worker, isMainThread, threadId } = require('worker_threads') + +const nworkers = Number(process.argv[2] || 0) +const workerMaxOldGenerationSizeMb = process.argv[3] +const maxCount = process.argv[4] || 12 +const sleepMs = process.argv[5] || 50 +const sizeQuantum = process.argv[6] || 5 * 1024 * 1024 + +console.log(`${isMainThread ? 'Main thread' : `Worker ${threadId}`}: \ +nworkers=${nworkers} workerMaxOldGenerationSizeMb=${workerMaxOldGenerationSizeMb} \ +maxCount=${maxCount} sleepMs=${sleepMs} sizeQuantum=${sizeQuantum}`) if (isMainThread) { - const nworkers = Number(process.argv[2]) - const workers = [] - if (nworkers) { - for (let i = 0; i < nworkers; i++) { - workers.push(new Worker(__filename)) - } + for (let i = 0; i < nworkers; i++) { + const worker = new Worker(__filename, + { + argv: [0, ...process.argv.slice(3)], + ...(workerMaxOldGenerationSizeMb ? 
{ resourceLimits: { maxOldGenerationSizeMb: 50 } } : {}) + }) + const threadId = worker.threadId + worker + .on('error', (err) => { console.log(`Worker ${threadId} error: ${err}`) }) + .on('exit', (code) => { console.log(`Worker ${threadId} exit: ${code}`) }) } } @@ -28,7 +44,4 @@ function foo (size) { if (count < maxCount) { setTimeout(() => foo(size), sleepMs) } } -const maxCount = process.argv[3] || 12 -const sleepMs = process.argv[4] || 50 - -setTimeout(() => foo(5 * 1024 * 1024), sleepMs) +setTimeout(() => foo(sizeQuantum), sleepMs) diff --git a/package.json b/package.json index 82940ca367b..3ad73ae3260 100644 --- a/package.json +++ b/package.json @@ -72,7 +72,7 @@ "@datadog/native-iast-rewriter": "2.2.1", "@datadog/native-iast-taint-tracking": "1.6.4", "@datadog/native-metrics": "^2.0.0", - "@datadog/pprof": "4.0.1", + "@datadog/pprof": "4.1.0", "@datadog/sketches-js": "^2.1.0", "@opentelemetry/api": "^1.0.0", "@opentelemetry/core": "^1.14.0", diff --git a/yarn.lock b/yarn.lock index d4b1dd55c2f..f0778b55a7e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -415,10 +415,10 @@ node-addon-api "^6.1.0" node-gyp-build "^3.9.0" -"@datadog/pprof@4.0.1": - version "4.0.1" - resolved "https://registry.yarnpkg.com/@datadog/pprof/-/pprof-4.0.1.tgz#f8629ecb62646d90ed49b6252dd0583d8d5001d3" - integrity sha512-TavqyiyQZOaUM9eQB07r8+K/T1CqKyOdsUGxpN79+BF+eOQBpTj/Cte6KdlhcUSKL3h5hSjC+vlgA7uW2qtVhA== +"@datadog/pprof@4.1.0": + version "4.1.0" + resolved "https://registry.yarnpkg.com/@datadog/pprof/-/pprof-4.1.0.tgz#db86a720f1dfecbcab8838bc1f148eb0a402af55" + integrity sha512-g7EWI185nwSuFwlmnAGDPxbPsqe+ipOoDB2oP841WMNRaJBPRdg5J90c+6ucmyltuC9VpTrmzzqcachkOTzZEQ== dependencies: delay "^5.0.0" node-gyp-build "<4.0" From 8aa146fb0b3358c32d5ea8b1272d3e70aa30ec86 Mon Sep 17 00:00:00 2001 From: Sam Brenner <106700075+sabrenner@users.noreply.github.com> Date: Fri, 8 Dec 2023 12:37:56 -0500 Subject: [PATCH 097/147] [core] Ensure Explicit Timeouts from Underlying Request Socket are Recorded as Errors When Using Node 20 (#3853) * req.socket timeouts recorded as errors node 20 * check for process.send --- .../src/http/client.js | 16 +++++++-- packages/datadog-plugin-http/src/client.js | 2 +- .../datadog-plugin-http/test/client.spec.js | 33 +++++++++++++++++++ .../test/integration-test/server.mjs | 4 ++- .../test/integration-test/server.mjs | 4 ++- 5 files changed, 54 insertions(+), 5 deletions(-) diff --git a/packages/datadog-instrumentations/src/http/client.js b/packages/datadog-instrumentations/src/http/client.js index fcf5cc05f0a..33ac14e4376 100644 --- a/packages/datadog-instrumentations/src/http/client.js +++ b/packages/datadog-instrumentations/src/http/client.js @@ -69,17 +69,29 @@ function patch (http, methodName) { try { const req = request.call(this, options, callback) const emit = req.emit - const setTimeout = req.setTimeout + + const requestSetTimeout = req.setTimeout ctx.req = req // tracked to accurately discern custom request socket timeout let customRequestTimeout = false + req.setTimeout = function () { customRequestTimeout = true - return setTimeout.apply(this, arguments) + return requestSetTimeout.apply(this, arguments) } + req.on('socket', socket => { + if (socket) { + const socketSetTimeout = socket.setTimeout + socket.setTimeout = function () { + customRequestTimeout = true + return socketSetTimeout.apply(this, arguments) + } + } + }) + req.emit = function (eventName, arg) { switch (eventName) { case 'response': { diff --git a/packages/datadog-plugin-http/src/client.js 
b/packages/datadog-plugin-http/src/client.js index 42833bb896f..65081dee34e 100644 --- a/packages/datadog-plugin-http/src/client.js +++ b/packages/datadog-plugin-http/src/client.js @@ -121,7 +121,7 @@ class HttpClientPlugin extends ClientPlugin { } else { // conditions for no error: // 1. not using a custom agent instance with custom timeout specified - // 2. no invocation of `req.setTimeout` + // 2. no invocation of `req.setTimeout` or `socket.setTimeout` if (!args.options.agent?.options.timeout && !customRequestTimeout) return span.setTag('error', 1) diff --git a/packages/datadog-plugin-http/test/client.spec.js b/packages/datadog-plugin-http/test/client.spec.js index 7256950ac83..3c50a19f228 100644 --- a/packages/datadog-plugin-http/test/client.spec.js +++ b/packages/datadog-plugin-http/test/client.spec.js @@ -900,6 +900,39 @@ describe('Plugin', () => { }) }) }).timeout(10000) + + it('should record error if req.socket.setTimeout is used with Node 20', done => { + const app = express() + + app.get('/user', async (req, res) => { + await new Promise(resolve => { + setTimeout(resolve, 6 * 1000) + }) + res.status(200).send() + }) + + getPort().then(port => { + agent + .use(traces => { + expect(traces[0][0]).to.have.property('error', 1) + }) + .then(done) + .catch(done) + + appListener = server(app, port, async () => { + const req = http.request(`${protocol}://localhost:${port}/user`, res => { + res.on('data', () => { }) + }) + + req.on('error', () => {}) + req.on('socket', socket => { + socket.setTimeout(5000)// match default timeout + }) + + req.end() + }) + }) + }).timeout(10000) } it('should only record a request once', done => { diff --git a/packages/datadog-plugin-http/test/integration-test/server.mjs b/packages/datadog-plugin-http/test/integration-test/server.mjs index 762cb7e9c84..01f59e02003 100644 --- a/packages/datadog-plugin-http/test/integration-test/server.mjs +++ b/packages/datadog-plugin-http/test/integration-test/server.mjs @@ -10,5 +10,7 @@ const server = http.createServer(async (req, res) => { } }).listen(0, () => { const port = server.address().port - process.send({ port }) + if (process.send) { + process.send({ port }) + } }) diff --git a/packages/datadog-plugin-http2/test/integration-test/server.mjs b/packages/datadog-plugin-http2/test/integration-test/server.mjs index 861e64dc7bd..d3a13bcc780 100644 --- a/packages/datadog-plugin-http2/test/integration-test/server.mjs +++ b/packages/datadog-plugin-http2/test/integration-test/server.mjs @@ -7,5 +7,7 @@ const server = http2.createServer((req, res) => { server.listen(0, () => { const port = server.address().port - process.send({ port }) + if (process.send) { + process.send({ port }) + } }) From 67b071e7cdbd66cb60d3a19a3e2c098de3975f79 Mon Sep 17 00:00:00 2001 From: Thomas Hunter II Date: Fri, 8 Dec 2023 12:18:58 -0800 Subject: [PATCH 098/147] chore: updating protobuf for security reasons (#3851) --- package.json | 2 +- yarn.lock | 2052 ++++++++++++++++++++++++++------------------------ 2 files changed, 1069 insertions(+), 985 deletions(-) diff --git a/package.json b/package.json index 3ad73ae3260..bcb6d20ba98 100644 --- a/package.json +++ b/package.json @@ -98,7 +98,7 @@ "opentracing": ">=0.12.1", "path-to-regexp": "^0.1.2", "pprof-format": "^2.0.7", - "protobufjs": "^7.2.4", + "protobufjs": "^7.2.5", "retry": "^0.13.1", "semver": "^7.5.4" }, diff --git a/yarn.lock b/yarn.lock index f0778b55a7e..fa4a7e11d31 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2,55 +2,71 @@ # yarn lockfile v1 -"@ampproject/remapping@^2.1.0", 
"@ampproject/remapping@^2.2.0": +"@ampproject/remapping@^2.1.0": version "2.2.0" - resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.0.tgz#56c133824780de3174aed5ab6834f3026790154d" + resolved "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.0.tgz" integrity sha512-qRmjj8nj9qmLTQXXmaR1cck3UXSRMPrbsLJAasZpF+t3riI71BXed5ebIOYwQntykeZuhjsdweEc9BxH5Jc26w== dependencies: "@jridgewell/gen-mapping" "^0.1.0" "@jridgewell/trace-mapping" "^0.3.9" +"@ampproject/remapping@^2.2.0": + version "2.2.1" + resolved "https://registry.yarnpkg.com/@ampproject/remapping/-/remapping-2.2.1.tgz#99e8e11851128b8702cd57c33684f1d0f260b630" + integrity sha512-lFMjJTrFL3j7L9yBxwYfCq2k6qqwHyzuUl/XBnif78PWTJYyL/dfowQHWE3sp6U6ZzqWiiIZnpTMO96zhkjwtg== + dependencies: + "@jridgewell/gen-mapping" "^0.3.0" + "@jridgewell/trace-mapping" "^0.3.9" + "@babel/code-frame@^7.18.6": version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.18.6.tgz#3b25d38c89600baa2dcc219edfa88a74eb2c427a" + resolved "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.18.6.tgz" integrity sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q== dependencies: "@babel/highlight" "^7.18.6" +"@babel/code-frame@^7.22.13", "@babel/code-frame@^7.23.5": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.23.5.tgz#9009b69a8c602293476ad598ff53e4562e15c244" + integrity "sha1-kAm2moxgIpNHatWY/1PkVi4VwkQ= sha512-CgH3s1a96LipHCmSUmYFPwY7MNx8C3avkq7i4Wl3cfa662ldtUe4VM1TPXX70pfmrlWTb6jLqTYrZyT2ZTJBgA==" + dependencies: + "@babel/highlight" "^7.23.4" + chalk "^2.4.2" + "@babel/compat-data@^7.19.3": version "7.19.3" - resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.19.3.tgz#707b939793f867f5a73b2666e6d9a3396eb03151" + resolved "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.19.3.tgz" integrity sha512-prBHMK4JYYK+wDjJF1q99KK4JLL+egWS4nmNqdlMUgCExMZ+iZW0hGhyC3VEbsPjvaN0TBhW//VIFwBrk8sEiw== -"@babel/compat-data@^7.20.5": - version "7.21.0" - resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.21.0.tgz#c241dc454e5b5917e40d37e525e2f4530c399298" - integrity sha512-gMuZsmsgxk/ENC3O/fRw5QY8A9/uxQbbCEypnLIiYYc/qVJtEV7ouxC3EllIIwNzMqAQee5tanFabWsUOutS7g== +"@babel/compat-data@^7.20.5", "@babel/compat-data@^7.22.9": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.23.5.tgz#ffb878728bb6bdcb6f4510aa51b1be9afb8cfd98" + integrity "sha1-/7h4cou2vctvRRCqUbG+mvuM/Zg= sha512-uU27kfDRlhfKl+w1U6vp16IuvSLtjAxdArVXPa9BvLkrr7CYIsxH5adpHObeAGY/41+syctUWOZ140a2Rvkgjw==" "@babel/core@^7.5.5": - version "7.21.0" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.21.0.tgz#1341aefdcc14ccc7553fcc688dd8986a2daffc13" - integrity sha512-PuxUbxcW6ZYe656yL3EAhpy7qXKq0DmYsrJLpbB8XrsCP9Nm+XCg9XFMb5vIDliPD7+U/+M+QJlH17XOcB7eXA== + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.23.5.tgz#6e23f2acbcb77ad283c5ed141f824fd9f70101c7" + integrity "sha1-biPyrLy3etKDxe0UH4JP2fcBAcc= sha512-Cwc2XjUrG4ilcfOw4wBAK+enbdgwAcAJCfGUItPBKR7Mjw4aEfAFYrLxeRp4jWgtNIKn3n2AlBOfwwafl+42/g==" dependencies: "@ampproject/remapping" "^2.2.0" - "@babel/code-frame" "^7.18.6" - "@babel/generator" "^7.21.0" - "@babel/helper-compilation-targets" "^7.20.7" - "@babel/helper-module-transforms" "^7.21.0" - "@babel/helpers" "^7.21.0" - "@babel/parser" "^7.21.0" - "@babel/template" "^7.20.7" - "@babel/traverse" "^7.21.0" - 
"@babel/types" "^7.21.0" - convert-source-map "^1.7.0" + "@babel/code-frame" "^7.23.5" + "@babel/generator" "^7.23.5" + "@babel/helper-compilation-targets" "^7.22.15" + "@babel/helper-module-transforms" "^7.23.3" + "@babel/helpers" "^7.23.5" + "@babel/parser" "^7.23.5" + "@babel/template" "^7.22.15" + "@babel/traverse" "^7.23.5" + "@babel/types" "^7.23.5" + convert-source-map "^2.0.0" debug "^4.1.0" gensync "^1.0.0-beta.2" - json5 "^2.2.2" - semver "^6.3.0" + json5 "^2.2.3" + semver "^6.3.1" "@babel/core@^7.7.5": version "7.19.3" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.19.3.tgz#2519f62a51458f43b682d61583c3810e7dcee64c" + resolved "https://registry.npmjs.org/@babel/core/-/core-7.19.3.tgz" integrity sha512-WneDJxdsjEvyKtXKsaBGbDeiyOjR5vYq4HcShxnIbG0qixpoHjI3MqeZM9NDvsojNCEBItQE4juOo/bU6e72gQ== dependencies: "@ampproject/remapping" "^2.1.0" @@ -71,33 +87,33 @@ "@babel/generator@^7.19.3": version "7.19.3" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.19.3.tgz#d7f4d1300485b4547cb6f94b27d10d237b42bf59" + resolved "https://registry.npmjs.org/@babel/generator/-/generator-7.19.3.tgz" integrity sha512-fqVZnmp1ncvZU757UzDheKZpfPgatqY59XtW2/j/18H7u76akb8xqvjw82f+i2UKd/ksYsSick/BCLQUUtJ/qQ== dependencies: "@babel/types" "^7.19.3" "@jridgewell/gen-mapping" "^0.3.2" jsesc "^2.5.1" -"@babel/generator@^7.21.0", "@babel/generator@^7.21.1": - version "7.21.1" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.21.1.tgz#951cc626057bc0af2c35cd23e9c64d384dea83dd" - integrity sha512-1lT45bAYlQhFn/BHivJs43AiW2rg3/UbLyShGfF3C0KmHvO5fSghWd5kBJy30kpRRucGzXStvnnCFniCR2kXAA== +"@babel/generator@^7.23.5": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.23.5.tgz#17d0a1ea6b62f351d281350a5f80b87a810c4755" + integrity "sha1-F9Ch6mti81HSgTUKX4C4eoEMR1U= sha512-BPssCHrBD+0YrxviOa3QzpqwhNIXKEtOa2jQrm4FlmkC2apYgRnQcmPWiGZDlGxiNtltnUFolMe8497Esry+jA==" dependencies: - "@babel/types" "^7.21.0" + "@babel/types" "^7.23.5" "@jridgewell/gen-mapping" "^0.3.2" "@jridgewell/trace-mapping" "^0.3.17" jsesc "^2.5.1" -"@babel/helper-annotate-as-pure@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.18.6.tgz#eaa49f6f80d5a33f9a5dd2276e6d6e451be0a6bb" - integrity sha512-duORpUiYrEpzKIop6iNbjnwKLAKnJ47csTyRACyEmWj0QdUrm5aqNJGHSSEQSUAvNW0ojX0dOmK9dZduvkfeXA== +"@babel/helper-annotate-as-pure@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.22.5.tgz#e7f06737b197d580a01edf75d97e2c8be99d3882" + integrity sha512-LvBTxu8bQSQkcyKOU+a1btnNFQ1dMAd0R6PyW3arXes06F6QLWLIrd681bxRPIXlrMGR3XYnW9JyML7dP3qgxg== dependencies: - "@babel/types" "^7.18.6" + "@babel/types" "^7.22.5" "@babel/helper-compilation-targets@^7.19.3": version "7.19.3" - resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.19.3.tgz#a10a04588125675d7c7ae299af86fa1b2ee038ca" + resolved "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.19.3.tgz" integrity sha512-65ESqLGyGmLvgR0mst5AdW1FkNlj9rQsCKduzEoEPhBCDFGXvz2jW6bXFG6i0/MrV2s7hhXjjb2yAzcPuQlLwg== dependencies: "@babel/compat-data" "^7.19.3" @@ -105,55 +121,59 @@ browserslist "^4.21.3" semver "^6.3.0" -"@babel/helper-compilation-targets@^7.20.7": - version "7.20.7" - resolved 
"https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.20.7.tgz#a6cd33e93629f5eb473b021aac05df62c4cd09bb" - integrity sha512-4tGORmfQcrc+bvrjb5y3dG9Mx1IOZjsHqQVUz7XCNHO+iTmqxWnVg3KRygjGmpRLJGdQSKuvFinbIb0CnZwHAQ== +"@babel/helper-compilation-targets@^7.20.7", "@babel/helper-compilation-targets@^7.22.15": + version "7.22.15" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.22.15.tgz#0698fc44551a26cf29f18d4662d5bf545a6cfc52" + integrity "sha1-Bpj8RFUaJs8p8Y1GYtW/VFps/FI= sha512-y6EEzULok0Qvz8yyLkCvVX+02ic+By2UdOhylwUOvOn9dvYc9mKICJuuU1n1XBI02YWsNsnrY1kc6DVbjcXbtw==" dependencies: - "@babel/compat-data" "^7.20.5" - "@babel/helper-validator-option" "^7.18.6" - browserslist "^4.21.3" + "@babel/compat-data" "^7.22.9" + "@babel/helper-validator-option" "^7.22.15" + browserslist "^4.21.9" lru-cache "^5.1.1" - semver "^6.3.0" + semver "^6.3.1" "@babel/helper-environment-visitor@^7.18.9": version "7.18.9" - resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz#0c0cee9b35d2ca190478756865bb3528422f51be" + resolved "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz" integrity sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg== -"@babel/helper-function-name@^7.19.0": - version "7.19.0" - resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.19.0.tgz#941574ed5390682e872e52d3f38ce9d1bef4648c" - integrity sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w== - dependencies: - "@babel/template" "^7.18.10" - "@babel/types" "^7.19.0" +"@babel/helper-environment-visitor@^7.22.20": + version "7.22.20" + resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz#96159db61d34a29dba454c959f5ae4a649ba9167" + integrity "sha1-lhWdth00op26RUyVn1rkpkm6kWc= sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA==" -"@babel/helper-function-name@^7.21.0": - version "7.21.0" - resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.21.0.tgz#d552829b10ea9f120969304023cd0645fa00b1b4" - integrity sha512-HfK1aMRanKHpxemaY2gqBmL04iAPOPRj7DxtNbiDOrJK+gdwkiNRVpCpUJYbUT+aZyemKN8brqTOxzCaG6ExRg== +"@babel/helper-function-name@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz#1f9a3cdbd5b2698a670c30d2735f9af95ed52759" + integrity "sha1-H5o829WyaYpnDDDSc1+a+V7VJ1k= sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw==" dependencies: - "@babel/template" "^7.20.7" - "@babel/types" "^7.21.0" + "@babel/template" "^7.22.15" + "@babel/types" "^7.23.0" -"@babel/helper-hoist-variables@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz#d4d2c8fb4baeaa5c68b99cc8245c56554f926678" - integrity sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q== +"@babel/helper-hoist-variables@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz#c01a007dac05c085914e8fb652b339db50d823bb" + integrity 
sha512-wGjk9QZVzvknA6yKIUURb8zY3grXCcOZt+/7Wcy8O2uctxhplmUPkOdlgoNhmdVee2c92JXbf1xpMtVNbfoxRw== dependencies: - "@babel/types" "^7.18.6" + "@babel/types" "^7.22.5" "@babel/helper-module-imports@^7.18.6": version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz#1e3ebdbbd08aad1437b428c50204db13c5a3ca6e" + resolved "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz" integrity sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA== dependencies: "@babel/types" "^7.18.6" +"@babel/helper-module-imports@^7.22.15": + version "7.22.15" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.22.15.tgz#16146307acdc40cc00c3b2c647713076464bdbf0" + integrity "sha1-FhRjB6zcQMwAw7LGR3EwdkZL2/A= sha512-0pYVBnDKZO2fnSPCrgM/6WMc7eS20Fbok+0r88fp+YtWVLZrp4CkafFGIp+W0VKw4a22sgebPT99y+FDNMdP4w==" + dependencies: + "@babel/types" "^7.22.15" + "@babel/helper-module-transforms@^7.19.0": version "7.19.0" - resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.19.0.tgz#309b230f04e22c58c6a2c0c0c7e50b216d350c30" + resolved "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.19.0.tgz" integrity sha512-3HBZ377Fe14RbLIA+ac3sY4PTgpxHVkFrESaWhoI5PuyXPBBX8+C34qblV9G89ZtycGJCmCI/Ut+VUDK4bltNQ== dependencies: "@babel/helper-environment-visitor" "^7.18.9" @@ -165,107 +185,130 @@ "@babel/traverse" "^7.19.0" "@babel/types" "^7.19.0" -"@babel/helper-module-transforms@^7.21.0": - version "7.21.2" - resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.21.2.tgz#160caafa4978ac8c00ac66636cb0fa37b024e2d2" - integrity sha512-79yj2AR4U/Oqq/WOV7Lx6hUjau1Zfo4cI+JLAVYeMV5XIlbOhmjEk5ulbTc9fMpmlojzZHkUUxAiK+UKn+hNQQ== +"@babel/helper-module-transforms@^7.23.3": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.23.3.tgz#d7d12c3c5d30af5b3c0fcab2a6d5217773e2d0f1" + integrity "sha1-19EsPF0wr1s8D8qyptUhd3Pi0PE= sha512-7bBs4ED9OmswdfDzpz4MpWgSrV7FXlc3zIagvLFjS5H+Mk7Snr21vQ6QwrsoCGMfNC4e4LQPdoULEt4ykz0SRQ==" dependencies: - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-module-imports" "^7.18.6" - "@babel/helper-simple-access" "^7.20.2" - "@babel/helper-split-export-declaration" "^7.18.6" - "@babel/helper-validator-identifier" "^7.19.1" - "@babel/template" "^7.20.7" - "@babel/traverse" "^7.21.2" - "@babel/types" "^7.21.2" + "@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-module-imports" "^7.22.15" + "@babel/helper-simple-access" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.6" + "@babel/helper-validator-identifier" "^7.22.20" -"@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.20.2", "@babel/helper-plugin-utils@^7.8.0": - version "7.20.2" - resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.20.2.tgz#d1b9000752b18d0877cff85a5c376ce5c3121629" - integrity sha512-8RvlJG2mj4huQ4pZ+rU9lqKi9ZKiRmuvGuM2HlWmkmgOhbs6zEAw6IEiJ5cQqGbDzGZOhwuOQNtZMi/ENLjZoQ== +"@babel/helper-plugin-utils@^7.20.2", "@babel/helper-plugin-utils@^7.22.5", "@babel/helper-plugin-utils@^7.8.0": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.22.5.tgz#dd7ee3735e8a313b9f7b05a773d892e88e6d7295" + integrity 
sha512-uLls06UVKgFG9QD4OeFYLEGteMIAa5kpTPcFL28yuCIIzsf6ZyKZMllKVOCZFhiZ5ptnwX4mtKdWCBE/uT4amg== "@babel/helper-simple-access@^7.18.6": version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.18.6.tgz#d6d8f51f4ac2978068df934b569f08f29788c7ea" + resolved "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.18.6.tgz" integrity sha512-iNpIgTgyAvDQpDj76POqg+YEt8fPxx3yaNBg3S30dxNKm2SWfYhD0TGrK/Eu9wHpUW63VQU894TsTg+GLbUa1g== dependencies: "@babel/types" "^7.18.6" -"@babel/helper-simple-access@^7.20.2": - version "7.20.2" - resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.20.2.tgz#0ab452687fe0c2cfb1e2b9e0015de07fc2d62dd9" - integrity sha512-+0woI/WPq59IrqDYbVGfshjT5Dmk/nnbdpcF8SnMhhXObpTq2KNBdLFRFrkVdbDOyUmHBCxzm5FHV1rACIkIbA== +"@babel/helper-simple-access@^7.22.5": + version "7.22.5" + resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.22.5.tgz#4938357dc7d782b80ed6dbb03a0fba3d22b1d5de" + integrity sha512-n0H99E/K+Bika3++WNL17POvo4rKWZ7lZEp1Q+fStVbUi8nxPQEBOlTmCOxW/0JsS56SKKQ+ojAe2pHKJHN35w== dependencies: - "@babel/types" "^7.20.2" + "@babel/types" "^7.22.5" "@babel/helper-split-export-declaration@^7.18.6": version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz#7367949bc75b20c6d5a5d4a97bba2824ae8ef075" + resolved "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz" integrity sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA== dependencies: "@babel/types" "^7.18.6" +"@babel/helper-split-export-declaration@^7.22.6": + version "7.22.6" + resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.22.6.tgz#322c61b7310c0997fe4c323955667f18fcefb91c" + integrity sha512-AsUnxuLhRYsisFiaJwvp1QF+I3KjD5FOxut14q/GzovUe6orHLesW2C7d754kRm53h5gqrz6sFl6sxc4BVtE/g== + dependencies: + "@babel/types" "^7.22.5" + "@babel/helper-string-parser@^7.18.10": version "7.18.10" - resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.18.10.tgz#181f22d28ebe1b3857fa575f5c290b1aaf659b56" + resolved "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.18.10.tgz" integrity sha512-XtIfWmeNY3i4t7t4D2t02q50HvqHybPqW2ki1kosnvWCwuCMeo81Jf0gwr85jy/neUdg5XDdeFE/80DXiO+njw== -"@babel/helper-string-parser@^7.19.4": - version "7.19.4" - resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.19.4.tgz#38d3acb654b4701a9b77fb0615a96f775c3a9e63" - integrity sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw== +"@babel/helper-string-parser@^7.23.4": + version "7.23.4" + resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.23.4.tgz#9478c707febcbbe1ddb38a3d91a2e054ae622d83" + integrity "sha1-lHjHB/68u+Hds4o9kaLgVK5iLYM= sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ==" "@babel/helper-validator-identifier@^7.18.6", "@babel/helper-validator-identifier@^7.19.1": version "7.19.1" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz#7eea834cf32901ffdc1a7ee555e2f9c27e249ca2" + resolved 
"https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz" integrity sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w== +"@babel/helper-validator-identifier@^7.22.20": + version "7.22.20" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz#c4ae002c61d2879e724581d96665583dbc1dc0e0" + integrity "sha1-xK4ALGHSh55yRYHZZmVYPbwdwOA= sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==" + "@babel/helper-validator-option@^7.18.6": version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz#bf0d2b5a509b1f336099e4ff36e1a63aa5db4db8" + resolved "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz" integrity sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw== +"@babel/helper-validator-option@^7.22.15": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.23.5.tgz#907a3fbd4523426285365d1206c423c4c5520307" + integrity "sha1-kHo/vUUjQmKFNl0SBsQjxMVSAwc= sha512-85ttAOMLsr53VgXkTbkx8oA6YTfT4q7/HzXSLEYmjcSTJPMPQtvq1BD79Byep5xMUYbGRzEpDsjUf3dyp54IKw==" + "@babel/helpers@^7.19.0": version "7.19.0" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.19.0.tgz#f30534657faf246ae96551d88dd31e9d1fa1fc18" + resolved "https://registry.npmjs.org/@babel/helpers/-/helpers-7.19.0.tgz" integrity sha512-DRBCKGwIEdqY3+rPJgG/dKfQy9+08rHIAJx8q2p+HSWP87s2HCrQmaAMMyMll2kIXKCW0cO1RdQskx15Xakftg== dependencies: "@babel/template" "^7.18.10" "@babel/traverse" "^7.19.0" "@babel/types" "^7.19.0" -"@babel/helpers@^7.21.0": - version "7.21.0" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.21.0.tgz#9dd184fb5599862037917cdc9eecb84577dc4e7e" - integrity sha512-XXve0CBtOW0pd7MRzzmoyuSj0e3SEzj8pgyFxnTT1NJZL38BD1MK7yYrm8yefRPIDvNNe14xR4FdbHwpInD4rA== +"@babel/helpers@^7.23.5": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.23.5.tgz#52f522840df8f1a848d06ea6a79b79eefa72401e" + integrity "sha1-UvUihA348ahI0G6mp5t57vpyQB4= sha512-oO7us8FzTEsG3U6ag9MfdF1iA/7Z6dz+MtFhifZk8C8o453rGJFFWUP1t+ULM9TUIAzC9uxXEiXjOiVMyd7QPg==" dependencies: - "@babel/template" "^7.20.7" - "@babel/traverse" "^7.21.0" - "@babel/types" "^7.21.0" + "@babel/template" "^7.22.15" + "@babel/traverse" "^7.23.5" + "@babel/types" "^7.23.5" "@babel/highlight@^7.18.6": version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.18.6.tgz#81158601e93e2563795adcbfbdf5d64be3f2ecdf" + resolved "https://registry.npmjs.org/@babel/highlight/-/highlight-7.18.6.tgz" integrity sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g== dependencies: "@babel/helper-validator-identifier" "^7.18.6" chalk "^2.0.0" js-tokens "^4.0.0" +"@babel/highlight@^7.23.4": + version "7.23.4" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.23.4.tgz#edaadf4d8232e1a961432db785091207ead0621b" + integrity "sha1-7arfTYIy4alhQy23hQkSB+rQYhs= sha512-acGdbYSfp2WheJoJm/EBBBLh/ID8KDc64ISZ9DYtBmC8/Q204PZJLHyzeB5qMzJ5trcOkybd78M4x2KWsUq++A==" + dependencies: + "@babel/helper-validator-identifier" "^7.22.20" + chalk "^2.4.2" + js-tokens "^4.0.0" + "@babel/parser@^7.18.10", "@babel/parser@^7.19.3": version "7.19.3" - resolved 
"https://registry.yarnpkg.com/@babel/parser/-/parser-7.19.3.tgz#8dd36d17c53ff347f9e55c328710321b49479a9a" + resolved "https://registry.npmjs.org/@babel/parser/-/parser-7.19.3.tgz" integrity sha512-pJ9xOlNWHiy9+FuFP09DEAFbAn4JskgRsVcc169w2xRBC3FRGuQEwjeIMMND9L2zc0iEhO/tGv4Zq+km+hxNpQ== -"@babel/parser@^7.20.7", "@babel/parser@^7.21.0", "@babel/parser@^7.21.2": - version "7.21.2" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.21.2.tgz#dacafadfc6d7654c3051a66d6fe55b6cb2f2a0b3" - integrity sha512-URpaIJQwEkEC2T9Kn+Ai6Xe/02iNaVCuT/PtoRz3GPVJVDpPd7mLo+VddTbhCRU9TXqW5mSrQfXZyi8kDKOVpQ== +"@babel/parser@^7.22.15", "@babel/parser@^7.23.5": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.5.tgz#37dee97c4752af148e1d38c34b856b2507660563" + integrity "sha1-N97pfEdSrxSOHTjDS4VrJQdmBWM= sha512-hOOqoiNXrmGdFbhgCzu6GiURxUgM27Xwd/aPuu8RfHEZPBzL1Z54okAHAQjXfcQNwvrlkAmAp4SlRTZ45vlthQ==" "@babel/plugin-proposal-object-rest-spread@^7.5.5": version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.20.7.tgz#aa662940ef425779c75534a5c41e9d936edc390a" - integrity sha512-d2S98yCiLxDVmBmE8UjGcfPvNEUbA1U5q5WxaWFUGRzJSVAZqm5W6MbPct0jxnegUZ0niLeNX+IOzEs7wYg9Dg== + resolved "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.20.7.tgz" + integrity "sha1-qmYpQO9CV3nHVTSlxB6dk27cOQo= sha512-d2S98yCiLxDVmBmE8UjGcfPvNEUbA1U5q5WxaWFUGRzJSVAZqm5W6MbPct0jxnegUZ0niLeNX+IOzEs7wYg9Dg==" dependencies: "@babel/compat-data" "^7.20.5" "@babel/helper-compilation-targets" "^7.20.7" @@ -273,143 +316,127 @@ "@babel/plugin-syntax-object-rest-spread" "^7.8.3" "@babel/plugin-transform-parameters" "^7.20.7" -"@babel/plugin-syntax-jsx@^7.18.6": - version "7.18.6" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.18.6.tgz#a8feef63b010150abd97f1649ec296e849943ca0" - integrity sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q== +"@babel/plugin-syntax-jsx@^7.23.3": + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.23.3.tgz#8f2e4f8a9b5f9aa16067e142c1ac9cd9f810f473" + integrity "sha1-jy5PiptfmqFgZ+FCwayc2fgQ9HM= sha512-EB2MELswq55OHUoRZLGg/zC7QWUKfNLpE57m/S2yr1uEneIgsTgrSzXP3NXEsMkVn76OlaVVnzN+ugObuYGwhg==" dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-syntax-object-rest-spread@^7.8.3": version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz" integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== dependencies: "@babel/helper-plugin-utils" "^7.8.0" "@babel/plugin-transform-destructuring@^7.5.0": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.20.7.tgz#8bda578f71620c7de7c93af590154ba331415454" - integrity sha512-Xwg403sRrZb81IVB79ZPqNQME23yhugYVqgTxAhT99h485F4f+GMELFhhOsscDUB7HCswepKeCKLn/GZvUKoBA== + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.23.3.tgz#8c9ee68228b12ae3dff986e56ed1ba4f3c446311" 
+ integrity "sha1-jJ7mgiixKuPf+YblbtG6TzxEYxE= sha512-n225npDqjDIr967cMScVKHXJs7rout1q+tt50inyBCPkyZ8KxeI6d+GIbSBTT/w/9WdlWDOej3V9HE5Lgk57gw==" dependencies: - "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-parameters@^7.20.7": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.20.7.tgz#0ee349e9d1bc96e78e3b37a7af423a4078a7083f" - integrity sha512-WiWBIkeHKVOSYPO0pWkxGPfKeWrCJyD3NJ53+Lrp/QMSZbsVPovrVl2aWZ19D/LTVnaDv5Ap7GJ/B2CTOZdrfA== + version "7.23.3" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.23.3.tgz#83ef5d1baf4b1072fa6e54b2b0999a7b2527e2af" + integrity "sha1-g+9dG69LEHL6blSysJmaeyUn4q8= sha512-09lMt6UsUb3/34BbECKVbVwrT9bO6lILWln237z7sLaWnMsTi7Yc9fhX5DLpkJzAGfaReXI22wP41SZmnAA3Vw==" dependencies: - "@babel/helper-plugin-utils" "^7.20.2" + "@babel/helper-plugin-utils" "^7.22.5" "@babel/plugin-transform-react-jsx@^7.3.0": - version "7.21.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.21.0.tgz#656b42c2fdea0a6d8762075d58ef9d4e3c4ab8a2" - integrity sha512-6OAWljMvQrZjR2DaNhVfRz6dkCAVV+ymcLUmaf8bccGOHn2v5rHJK3tTpij0BuhdYWP4LLaqj5lwcdlpAAPuvg== + version "7.23.4" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.23.4.tgz#393f99185110cea87184ea47bcb4a7b0c2e39312" + integrity "sha1-OT+ZGFEQzqhxhOpHvLSnsMLjkxI= sha512-5xOpoPguCZCRbo/JeHlloSkTA8Bld1J/E1/kLfD1nsuiW1m8tduTA1ERCgIZokDflX/IBzKcqR3l7VlRgiIfHA==" dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-module-imports" "^7.18.6" - "@babel/helper-plugin-utils" "^7.20.2" - "@babel/plugin-syntax-jsx" "^7.18.6" - "@babel/types" "^7.21.0" + "@babel/helper-annotate-as-pure" "^7.22.5" + "@babel/helper-module-imports" "^7.22.15" + "@babel/helper-plugin-utils" "^7.22.5" + "@babel/plugin-syntax-jsx" "^7.23.3" + "@babel/types" "^7.23.4" "@babel/template@^7.18.10": version "7.18.10" - resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.18.10.tgz#6f9134835970d1dbf0835c0d100c9f38de0c5e71" + resolved "https://registry.npmjs.org/@babel/template/-/template-7.18.10.tgz" integrity sha512-TI+rCtooWHr3QJ27kJxfjutghu44DLnasDMwpDqCXVTal9RLp3RSYNh4NdBrRP2cQAoG9A8juOQl6P6oZG4JxA== dependencies: "@babel/code-frame" "^7.18.6" "@babel/parser" "^7.18.10" "@babel/types" "^7.18.10" -"@babel/template@^7.20.7": - version "7.20.7" - resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.20.7.tgz#a15090c2839a83b02aa996c0b4994005841fd5a8" - integrity sha512-8SegXApWe6VoNw0r9JHpSteLKTpTiLZ4rMlGIm9JQ18KiCtyQiAMEazujAHrUS5flrcqYZa75ukev3P6QmUwUw== - dependencies: - "@babel/code-frame" "^7.18.6" - "@babel/parser" "^7.20.7" - "@babel/types" "^7.20.7" - -"@babel/traverse@^7.19.0", "@babel/traverse@^7.19.3": - version "7.19.3" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.19.3.tgz#3a3c5348d4988ba60884e8494b0592b2f15a04b4" - integrity sha512-qh5yf6149zhq2sgIXmwjnsvmnNQC2iw70UFjp4olxucKrWd/dvlUsBI88VSLUsnMNF7/vnOiA+nk1+yLoCqROQ== - dependencies: - "@babel/code-frame" "^7.18.6" - "@babel/generator" "^7.19.3" - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-function-name" "^7.19.0" - "@babel/helper-hoist-variables" "^7.18.6" - "@babel/helper-split-export-declaration" "^7.18.6" - "@babel/parser" "^7.19.3" - "@babel/types" "^7.19.3" - debug "^4.1.0" - globals "^11.1.0" - 
-"@babel/traverse@^7.21.0", "@babel/traverse@^7.21.2": - version "7.21.2" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.21.2.tgz#ac7e1f27658750892e815e60ae90f382a46d8e75" - integrity sha512-ts5FFU/dSUPS13tv8XiEObDu9K+iagEKME9kAbaP7r0Y9KtZJZ+NGndDvWoRAYNpeWafbpFeki3q9QoMD6gxyw== - dependencies: - "@babel/code-frame" "^7.18.6" - "@babel/generator" "^7.21.1" - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-function-name" "^7.21.0" - "@babel/helper-hoist-variables" "^7.18.6" - "@babel/helper-split-export-declaration" "^7.18.6" - "@babel/parser" "^7.21.2" - "@babel/types" "^7.21.2" +"@babel/template@^7.22.15": + version "7.22.15" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.15.tgz#09576efc3830f0430f4548ef971dde1350ef2f38" + integrity "sha1-CVdu/Dgw8EMPRUjvlx3eE1DvLzg= sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w==" + dependencies: + "@babel/code-frame" "^7.22.13" + "@babel/parser" "^7.22.15" + "@babel/types" "^7.22.15" + +"@babel/traverse@^7.19.0", "@babel/traverse@^7.19.3", "@babel/traverse@^7.23.5": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.23.5.tgz#f546bf9aba9ef2b042c0e00d245990c15508e7ec" + integrity "sha1-9Ua/mrqe8rBCwOANJFmQwVUI5+w= sha512-czx7Xy5a6sapWWRx61m1Ke1Ra4vczu1mCTtJam5zRTBOonfdJ+S/B6HYmGYu3fJtr8GGET3si6IhgWVBhJ/m8w==" + dependencies: + "@babel/code-frame" "^7.23.5" + "@babel/generator" "^7.23.5" + "@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-function-name" "^7.23.0" + "@babel/helper-hoist-variables" "^7.22.5" + "@babel/helper-split-export-declaration" "^7.22.6" + "@babel/parser" "^7.23.5" + "@babel/types" "^7.23.5" debug "^4.1.0" globals "^11.1.0" "@babel/types@^7.18.10", "@babel/types@^7.18.6", "@babel/types@^7.19.0", "@babel/types@^7.19.3": version "7.19.3" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.19.3.tgz#fc420e6bbe54880bce6779ffaf315f5e43ec9624" + resolved "https://registry.npmjs.org/@babel/types/-/types-7.19.3.tgz" integrity sha512-hGCaQzIY22DJlDh9CH7NOxgKkFjBk0Cw9xDO1Xmh2151ti7wiGfQ3LauXzL4HP1fmFlTX6XjpRETTpUcv7wQLw== dependencies: "@babel/helper-string-parser" "^7.18.10" "@babel/helper-validator-identifier" "^7.19.1" to-fast-properties "^2.0.0" -"@babel/types@^7.20.2", "@babel/types@^7.20.7", "@babel/types@^7.21.0", "@babel/types@^7.21.2": - version "7.21.2" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.21.2.tgz#92246f6e00f91755893c2876ad653db70c8310d1" - integrity sha512-3wRZSs7jiFaB8AjxiiD+VqN5DTG2iRvJGQ+qYFrs/654lg6kGTQWIOFjlBo5RaXuAZjBmP3+OQH4dmhqiiyYxw== +"@babel/types@^7.22.15", "@babel/types@^7.22.5", "@babel/types@^7.23.0", "@babel/types@^7.23.4", "@babel/types@^7.23.5": + version "7.23.5" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.23.5.tgz#48d730a00c95109fa4393352705954d74fb5b602" + integrity "sha1-SNcwoAyVEJ+kOTNScFlU10+1tgI= sha512-ON5kSOJwVO6xXVRTvOI0eOnWe7VdUcIpsovGo9U/Br4Ie4UVFQTboO2cYnDhAGU6Fp+UxSiT+pMft0SMHfuq6w==" dependencies: - "@babel/helper-string-parser" "^7.19.4" - "@babel/helper-validator-identifier" "^7.19.1" + "@babel/helper-string-parser" "^7.23.4" + "@babel/helper-validator-identifier" "^7.22.20" to-fast-properties "^2.0.0" "@colors/colors@1.5.0": version "1.5.0" - resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.5.0.tgz#bb504579c1cae923e6576a4f5da43d25f97bdbd9" - integrity sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ== + resolved 
"https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz" + integrity "sha1-u1BFecHK6SPmV2pPXaQ9Jfl729k= sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==" "@datadog/native-appsec@5.0.0": version "5.0.0" - resolved "https://registry.yarnpkg.com/@datadog/native-appsec/-/native-appsec-5.0.0.tgz#e42e77f42062532ad7defa3a79090dc8b020c22b" - integrity sha512-Ks8a4L49N40w+TJjj2e9ncGssUIEjo4wnmUFjPBRvlLGuVj1VJLxCx7ztpd8eTycM5QQlzggCDOP6CMEVmeZbA== + resolved "https://registry.npmjs.org/@datadog/native-appsec/-/native-appsec-5.0.0.tgz" + integrity "sha1-5C539CBiUyrX3vo6eQkNyLAgwis= sha512-Ks8a4L49N40w+TJjj2e9ncGssUIEjo4wnmUFjPBRvlLGuVj1VJLxCx7ztpd8eTycM5QQlzggCDOP6CMEVmeZbA==" dependencies: node-gyp-build "^3.9.0" "@datadog/native-iast-rewriter@2.2.1": version "2.2.1" - resolved "https://registry.yarnpkg.com/@datadog/native-iast-rewriter/-/native-iast-rewriter-2.2.1.tgz#3c74c5a8caa0b876e091e9c5a95256add0d73e1c" - integrity sha512-DyZlE8gNa5AoOFNKGRJU4RYF/Y/tJzv4bIAMuVBbEnMA0xhiIYqpYQG8T3OKkALl3VSEeBMjYwuOR2fCrJ6gzA== + resolved "https://registry.npmjs.org/@datadog/native-iast-rewriter/-/native-iast-rewriter-2.2.1.tgz" + integrity "sha1-PHTFqMqguHbgkenFqVJWrdDXPhw= sha512-DyZlE8gNa5AoOFNKGRJU4RYF/Y/tJzv4bIAMuVBbEnMA0xhiIYqpYQG8T3OKkALl3VSEeBMjYwuOR2fCrJ6gzA==" dependencies: lru-cache "^7.14.0" node-gyp-build "^4.5.0" "@datadog/native-iast-taint-tracking@1.6.4": version "1.6.4" - resolved "https://registry.yarnpkg.com/@datadog/native-iast-taint-tracking/-/native-iast-taint-tracking-1.6.4.tgz#16c21ad7c36a53420c0d3c5a3720731809cc7e98" - integrity sha512-Owxk7hQ4Dxwv4zJAoMjRga0IvE6lhvxnNc8pJCHsemCWBXchjr/9bqg05Zy5JnMbKUWn4XuZeJD6RFZpRa8bfw== + resolved "https://registry.npmjs.org/@datadog/native-iast-taint-tracking/-/native-iast-taint-tracking-1.6.4.tgz" + integrity "sha1-FsIa18NqU0IMDTxaNyBzGAnMfpg= sha512-Owxk7hQ4Dxwv4zJAoMjRga0IvE6lhvxnNc8pJCHsemCWBXchjr/9bqg05Zy5JnMbKUWn4XuZeJD6RFZpRa8bfw==" dependencies: node-gyp-build "^3.9.0" "@datadog/native-metrics@^2.0.0": version "2.0.0" - resolved "https://registry.yarnpkg.com/@datadog/native-metrics/-/native-metrics-2.0.0.tgz#65bf03313ee419956361e097551db36173e85712" + resolved "https://registry.npmjs.org/@datadog/native-metrics/-/native-metrics-2.0.0.tgz" integrity sha512-YklGVwUtmKGYqFf1MNZuOHvTYdKuR4+Af1XkWcMD8BwOAjxmd9Z+97328rCOY8TFUJzlGUPaXzB8j2qgG/BMwA== dependencies: node-addon-api "^6.1.0" @@ -417,8 +444,8 @@ "@datadog/pprof@4.1.0": version "4.1.0" - resolved "https://registry.yarnpkg.com/@datadog/pprof/-/pprof-4.1.0.tgz#db86a720f1dfecbcab8838bc1f148eb0a402af55" - integrity sha512-g7EWI185nwSuFwlmnAGDPxbPsqe+ipOoDB2oP841WMNRaJBPRdg5J90c+6ucmyltuC9VpTrmzzqcachkOTzZEQ== + resolved "https://registry.npmjs.org/@datadog/pprof/-/pprof-4.1.0.tgz" + integrity "sha1-24anIPHf7LyriDi8HxSOsKQCr1U= sha512-g7EWI185nwSuFwlmnAGDPxbPsqe+ipOoDB2oP841WMNRaJBPRdg5J90c+6ucmyltuC9VpTrmzzqcachkOTzZEQ==" dependencies: delay "^5.0.0" node-gyp-build "<4.0" @@ -428,7 +455,7 @@ "@datadog/sketches-js@^2.1.0": version "2.1.0" - resolved "https://registry.yarnpkg.com/@datadog/sketches-js/-/sketches-js-2.1.0.tgz#8c7e8028a5fc22ad102fa542b0a446c956830455" + resolved "https://registry.npmjs.org/@datadog/sketches-js/-/sketches-js-2.1.0.tgz" integrity sha512-smLocSfrt3s53H/XSVP3/1kP42oqvrkjUPtyaFd1F79ux24oE31BKt+q0c6lsa6hOYrFzsIwyc5GXAI5JmfOew== "@esbuild/android-arm64@0.16.12": @@ -448,7 +475,7 @@ "@esbuild/darwin-arm64@0.16.12": version "0.16.12" - resolved 
"https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.16.12.tgz#ac6c5d85cabf20de5047b55eab7f3c252d9aae71" + resolved "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.16.12.tgz" integrity sha512-Dpe5hOAQiQRH20YkFAg+wOpcd4PEuXud+aGgKBQa/VriPJA8zuVlgCOSTwna1CgYl05lf6o5els4dtuyk1qJxQ== "@esbuild/darwin-x64@0.16.12": @@ -543,7 +570,7 @@ "@eslint/eslintrc@^1.3.2": version "1.3.2" - resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-1.3.2.tgz#58b69582f3b7271d8fa67fe5251767a5b38ea356" + resolved "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.3.2.tgz" integrity sha512-AXYd23w1S/bv3fTs3Lz0vjiYemS08jWkI3hYyS9I1ry+0f+Yjs1wm+sU0BS8qDOPrBIkp4qHYC16I8uVtpLajQ== dependencies: ajv "^6.12.4" @@ -558,7 +585,7 @@ "@humanwhocodes/config-array@^0.10.5": version "0.10.5" - resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.10.5.tgz#bb679745224745fff1e9a41961c1d45a49f81c04" + resolved "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.10.5.tgz" integrity sha512-XVVDtp+dVvRxMoxSiSfasYaG02VEe1qH5cKgMQJWhol6HwzbcqoCMJi8dAGoYAO57jhUyhI6cWuRiTcRaDaYug== dependencies: "@humanwhocodes/object-schema" "^1.2.1" @@ -567,22 +594,22 @@ "@humanwhocodes/gitignore-to-minimatch@^1.0.2": version "1.0.2" - resolved "https://registry.yarnpkg.com/@humanwhocodes/gitignore-to-minimatch/-/gitignore-to-minimatch-1.0.2.tgz#316b0a63b91c10e53f242efb4ace5c3b34e8728d" + resolved "https://registry.npmjs.org/@humanwhocodes/gitignore-to-minimatch/-/gitignore-to-minimatch-1.0.2.tgz" integrity sha512-rSqmMJDdLFUsyxR6FMtD00nfQKKLFb1kv+qBbOVKqErvloEIJLo5bDTJTQNTYgeyp78JsA7u/NPi5jT1GR/MuA== "@humanwhocodes/module-importer@^1.0.1": version "1.0.1" - resolved "https://registry.yarnpkg.com/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz#af5b2691a22b44be847b0ca81641c5fb6ad0172c" + resolved "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz" integrity sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA== "@humanwhocodes/object-schema@^1.2.1": version "1.2.1" - resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" + resolved "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz" integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== "@isaacs/import-jsx@^4.0.1": version "4.0.1" - resolved "https://registry.yarnpkg.com/@isaacs/import-jsx/-/import-jsx-4.0.1.tgz#493cab5fc543a0703dba7c3f5947d6499028a169" + resolved "https://registry.npmjs.org/@isaacs/import-jsx/-/import-jsx-4.0.1.tgz" integrity sha512-l34FEsEqpdYdGcQjRCxWy+7rHY6euUbOBz9FI+Mq6oQeVhNegHcXFSJxVxrJvOpO31NbnDjS74quKXDlPDearA== dependencies: "@babel/core" "^7.5.5" @@ -597,7 +624,7 @@ "@istanbuljs/load-nyc-config@^1.0.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" + resolved "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz" integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== dependencies: camelcase "^5.3.1" @@ -608,52 +635,71 @@ "@istanbuljs/schema@^0.1.2": version "0.1.3" - resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.3.tgz#e45e384e4b8ec16bce2fd903af78450f6bf7ec98" + resolved 
"https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz" integrity sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA== "@jridgewell/gen-mapping@^0.1.0": version "0.1.1" - resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz#e5d2e450306a9491e3bd77e323e38d7aff315996" + resolved "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz" integrity sha512-sQXCasFk+U8lWYEe66WxRDOE9PjVz4vSM51fTu3Hw+ClTpUSQb718772vH3pyS5pShp6lvQM7SxgIDXXXmOX7w== dependencies: "@jridgewell/set-array" "^1.0.0" "@jridgewell/sourcemap-codec" "^1.4.10" +"@jridgewell/gen-mapping@^0.3.0": + version "0.3.3" + resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.3.tgz#7e02e6eb5df901aaedb08514203b096614024098" + integrity sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ== + dependencies: + "@jridgewell/set-array" "^1.0.1" + "@jridgewell/sourcemap-codec" "^1.4.10" + "@jridgewell/trace-mapping" "^0.3.9" + "@jridgewell/gen-mapping@^0.3.2": version "0.3.2" - resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz#c1aedc61e853f2bb9f5dfe6d4442d3b565b253b9" + resolved "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz" integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A== dependencies: "@jridgewell/set-array" "^1.0.1" "@jridgewell/sourcemap-codec" "^1.4.10" "@jridgewell/trace-mapping" "^0.3.9" -"@jridgewell/resolve-uri@3.1.0", "@jridgewell/resolve-uri@^3.0.3": +"@jridgewell/resolve-uri@^3.0.3": version "3.1.0" - resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78" + resolved "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz" integrity sha512-F2msla3tad+Mfht5cJq7LSXcdudKTWCVYUgw6pLFOOHSTtZlj6SWNYAp+AhuqLmWdBO2X5hPrLcu8cVP8fy28w== +"@jridgewell/resolve-uri@^3.1.0": + version "3.1.1" + resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.1.tgz#c08679063f279615a3326583ba3a90d1d82cc721" + integrity sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA== + "@jridgewell/set-array@^1.0.0", "@jridgewell/set-array@^1.0.1": version "1.1.2" - resolved "https://registry.yarnpkg.com/@jridgewell/set-array/-/set-array-1.1.2.tgz#7c6cf998d6d20b914c0a55a91ae928ff25965e72" + resolved "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.2.tgz" integrity sha512-xnkseuNADM0gt2bs+BvhO0p78Mk762YnZdsuzFV018NoG1Sj1SCQvpSqa7XUaTam5vAGasABV9qXASMKnFMwMw== -"@jridgewell/sourcemap-codec@1.4.14", "@jridgewell/sourcemap-codec@^1.4.10": +"@jridgewell/sourcemap-codec@^1.4.10": version "1.4.14" - resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24" + resolved "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz" integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw== +"@jridgewell/sourcemap-codec@^1.4.14": + version "1.4.15" + resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz#d7c6e6755c78567a951e04ab52ef0fd26de59f32" + integrity sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg== + "@jridgewell/trace-mapping@^0.3.17": - version "0.3.17" - resolved 
"https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.17.tgz#793041277af9073b0951a7fe0f0d8c4c98c36985" - integrity sha512-MCNzAp77qzKca9+W/+I0+sEpaUnZoeasnghNeVc41VZCEKaCH73Vq3BZZ/SzWIgrqE4H4ceI+p+b6C0mHf9T4g== + version "0.3.20" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.20.tgz#72e45707cf240fa6b081d0366f8265b0cd10197f" + integrity "sha1-cuRXB88kD6awgdA2b4JlsM0QGX8= sha512-R8LcPeWZol2zR8mmH3JeKQ6QRCFb7XgUhV9ZlGhHLGyg4wpPiPZNQOOWhFZhxKw8u//yTbNGI42Bx/3paXEQ+Q==" dependencies: - "@jridgewell/resolve-uri" "3.1.0" - "@jridgewell/sourcemap-codec" "1.4.14" + "@jridgewell/resolve-uri" "^3.1.0" + "@jridgewell/sourcemap-codec" "^1.4.14" "@jridgewell/trace-mapping@^0.3.9": version "0.3.15" - resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.15.tgz#aba35c48a38d3fd84b37e66c9c0423f9744f9774" + resolved "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.15.tgz" integrity sha512-oWZNOULl+UbhsgB51uuZzglikfIKSUBO/M9W2OfEjn7cmqoAiCgmv9lyACTUacZwBz0ITnJ2NqjU8Tx0DHL88g== dependencies: "@jridgewell/resolve-uri" "^3.0.3" @@ -661,7 +707,7 @@ "@nodelib/fs.scandir@2.1.5": version "2.1.5" - resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" + resolved "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz" integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== dependencies: "@nodelib/fs.stat" "2.0.5" @@ -669,12 +715,12 @@ "@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": version "2.0.5" - resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" + resolved "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz" integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== "@nodelib/fs.walk@^1.2.3": version "1.2.8" - resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" + resolved "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz" integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== dependencies: "@nodelib/fs.scandir" "2.1.5" @@ -682,44 +728,44 @@ "@opentelemetry/api@^1.0.0": version "1.4.1" - resolved "https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.4.1.tgz#ff22eb2e5d476fbc2450a196e40dd243cc20c28f" - integrity sha512-O2yRJce1GOc6PAy3QxFM4NzFiWzvScDC1/5ihYBL6BUEVdq0XMWN01sppE+H6bBXbaFYipjwFLEWLg5PaSOThA== + resolved "https://registry.npmjs.org/@opentelemetry/api/-/api-1.4.1.tgz" + integrity "sha1-/yLrLl1Hb7wkUKGW5A3SQ8wgwo8= sha512-O2yRJce1GOc6PAy3QxFM4NzFiWzvScDC1/5ihYBL6BUEVdq0XMWN01sppE+H6bBXbaFYipjwFLEWLg5PaSOThA==" "@opentelemetry/core@^1.14.0": version "1.14.0" - resolved "https://registry.yarnpkg.com/@opentelemetry/core/-/core-1.14.0.tgz#64e876b29cb736c984d54164cd47433f513eafd3" - integrity sha512-MnMZ+sxsnlzloeuXL2nm5QcNczt/iO82UOeQQDHhV83F2fP3sgntW2evvtoxJki0MBLxEsh5ADD7PR/Hn5uzjw== + resolved "https://registry.npmjs.org/@opentelemetry/core/-/core-1.14.0.tgz" + integrity "sha1-ZOh2spy3NsmE1UFkzUdDP1E+r9M= sha512-MnMZ+sxsnlzloeuXL2nm5QcNczt/iO82UOeQQDHhV83F2fP3sgntW2evvtoxJki0MBLxEsh5ADD7PR/Hn5uzjw==" dependencies: "@opentelemetry/semantic-conventions" "1.14.0" "@opentelemetry/semantic-conventions@1.14.0": version "1.14.0" - resolved 
"https://registry.yarnpkg.com/@opentelemetry/semantic-conventions/-/semantic-conventions-1.14.0.tgz#6a729b7f372ce30f77a3f217c09bc216f863fccb" - integrity sha512-rJfCY8rCWz3cb4KI6pEofnytvMPuj3YLQwoscCCYZ5DkdiPjo15IQ0US7+mjcWy9H3fcZIzf2pbJZ7ck/h4tug== + resolved "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.14.0.tgz" + integrity "sha1-anKbfzcs4w93o/IXwJvCFvhj/Ms= sha512-rJfCY8rCWz3cb4KI6pEofnytvMPuj3YLQwoscCCYZ5DkdiPjo15IQ0US7+mjcWy9H3fcZIzf2pbJZ7ck/h4tug==" "@protobufjs/aspromise@^1.1.1", "@protobufjs/aspromise@^1.1.2": version "1.1.2" - resolved "https://registry.yarnpkg.com/@protobufjs/aspromise/-/aspromise-1.1.2.tgz#9b8b0cc663d669a7d8f6f5d0893a14d348f30fbf" + resolved "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz" integrity sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ== "@protobufjs/base64@^1.1.2": version "1.1.2" - resolved "https://registry.yarnpkg.com/@protobufjs/base64/-/base64-1.1.2.tgz#4c85730e59b9a1f1f349047dbf24296034bb2735" + resolved "https://registry.npmjs.org/@protobufjs/base64/-/base64-1.1.2.tgz" integrity sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg== "@protobufjs/codegen@^2.0.4": version "2.0.4" - resolved "https://registry.yarnpkg.com/@protobufjs/codegen/-/codegen-2.0.4.tgz#7ef37f0d010fb028ad1ad59722e506d9262815cb" + resolved "https://registry.npmjs.org/@protobufjs/codegen/-/codegen-2.0.4.tgz" integrity sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg== "@protobufjs/eventemitter@^1.1.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz#355cbc98bafad5978f9ed095f397621f1d066b70" + resolved "https://registry.npmjs.org/@protobufjs/eventemitter/-/eventemitter-1.1.0.tgz" integrity sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q== "@protobufjs/fetch@^1.1.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@protobufjs/fetch/-/fetch-1.1.0.tgz#ba99fb598614af65700c1619ff06d454b0d84c45" + resolved "https://registry.npmjs.org/@protobufjs/fetch/-/fetch-1.1.0.tgz" integrity sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ== dependencies: "@protobufjs/aspromise" "^1.1.1" @@ -727,54 +773,54 @@ "@protobufjs/float@^1.0.2": version "1.0.2" - resolved "https://registry.yarnpkg.com/@protobufjs/float/-/float-1.0.2.tgz#5e9e1abdcb73fc0a7cb8b291df78c8cbd97b87d1" + resolved "https://registry.npmjs.org/@protobufjs/float/-/float-1.0.2.tgz" integrity sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ== "@protobufjs/inquire@^1.1.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@protobufjs/inquire/-/inquire-1.1.0.tgz#ff200e3e7cf2429e2dcafc1140828e8cc638f089" + resolved "https://registry.npmjs.org/@protobufjs/inquire/-/inquire-1.1.0.tgz" integrity sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q== "@protobufjs/path@^1.1.2": version "1.1.2" - resolved "https://registry.yarnpkg.com/@protobufjs/path/-/path-1.1.2.tgz#6cc2b20c5c9ad6ad0dccfd21ca7673d8d7fbf68d" + resolved "https://registry.npmjs.org/@protobufjs/path/-/path-1.1.2.tgz" integrity sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA== "@protobufjs/pool@^1.1.0": version "1.1.0" - resolved 
"https://registry.yarnpkg.com/@protobufjs/pool/-/pool-1.1.0.tgz#09fd15f2d6d3abfa9b65bc366506d6ad7846ff54" + resolved "https://registry.npmjs.org/@protobufjs/pool/-/pool-1.1.0.tgz" integrity sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw== "@protobufjs/utf8@^1.1.0": version "1.1.0" - resolved "https://registry.yarnpkg.com/@protobufjs/utf8/-/utf8-1.1.0.tgz#a777360b5b39a1a2e5106f8e858f2fd2d060c570" + resolved "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz" integrity sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw== "@sinonjs/commons@^2.0.0": version "2.0.0" - resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-2.0.0.tgz#fd4ca5b063554307e8327b4564bd56d3b73924a3" - integrity sha512-uLa0j859mMrg2slwQYdO/AkrOfmH+X6LTVmNTS9CqexuE2IvVORIkSpJLqePAbEnKJ77aMmCwr1NUZ57120Xcg== + resolved "https://registry.npmjs.org/@sinonjs/commons/-/commons-2.0.0.tgz" + integrity "sha1-/UylsGNVQwfoMntFZL1W07c5JKM= sha512-uLa0j859mMrg2slwQYdO/AkrOfmH+X6LTVmNTS9CqexuE2IvVORIkSpJLqePAbEnKJ77aMmCwr1NUZ57120Xcg==" dependencies: type-detect "4.0.8" "@sinonjs/commons@^3.0.0": version "3.0.0" - resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-3.0.0.tgz#beb434fe875d965265e04722ccfc21df7f755d72" - integrity sha512-jXBtWAF4vmdNmZgD5FoKsVLv3rPgDnLgPbU84LIJ3otV44vJlDRokVng5v8NFJdCf/da9legHcKaRuZs4L7faA== + resolved "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.0.tgz" + integrity "sha1-vrQ0/oddllJl4EcizPwh3391XXI= sha512-jXBtWAF4vmdNmZgD5FoKsVLv3rPgDnLgPbU84LIJ3otV44vJlDRokVng5v8NFJdCf/da9legHcKaRuZs4L7faA==" dependencies: type-detect "4.0.8" "@sinonjs/fake-timers@^10.0.2", "@sinonjs/fake-timers@^10.3.0": version "10.3.0" - resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz#55fdff1ecab9f354019129daf4df0dd4d923ea66" - integrity sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA== + resolved "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz" + integrity "sha1-Vf3/Hsq581QBkSna9N8N1Nkj6mY= sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==" dependencies: "@sinonjs/commons" "^3.0.0" "@sinonjs/samsam@^8.0.0": version "8.0.0" - resolved "https://registry.yarnpkg.com/@sinonjs/samsam/-/samsam-8.0.0.tgz#0d488c91efb3fa1442e26abea81759dfc8b5ac60" - integrity sha512-Bp8KUVlLp8ibJZrnvq2foVhP0IVX2CIprMJPK0vqGqgrDa0OHVKeZyBykqskkrdxV6yKBPmGasO8LVjAKR3Gew== + resolved "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-8.0.0.tgz" + integrity "sha1-DUiMke+z+hRC4mq+qBdZ38i1rGA= sha512-Bp8KUVlLp8ibJZrnvq2foVhP0IVX2CIprMJPK0vqGqgrDa0OHVKeZyBykqskkrdxV6yKBPmGasO8LVjAKR3Gew==" dependencies: "@sinonjs/commons" "^2.0.0" lodash.get "^4.4.2" @@ -782,56 +828,56 @@ "@sinonjs/text-encoding@^0.7.1": version "0.7.2" - resolved "https://registry.yarnpkg.com/@sinonjs/text-encoding/-/text-encoding-0.7.2.tgz#5981a8db18b56ba38ef0efb7d995b12aa7b51918" + resolved "https://registry.npmjs.org/@sinonjs/text-encoding/-/text-encoding-0.7.2.tgz" integrity sha512-sXXKG+uL9IrKqViTtao2Ws6dy0znu9sOaP1di/jKGW1M6VssO8vlpXCQcpZ+jisQ1tTFAC5Jo/EOzFbggBagFQ== "@types/json5@^0.0.29": version "0.0.29" - resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" + resolved "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz" integrity sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ== 
"@types/node@>=13.7.0": version "18.11.9" - resolved "https://registry.yarnpkg.com/@types/node/-/node-18.11.9.tgz#02d013de7058cea16d36168ef2fc653464cfbad4" + resolved "https://registry.npmjs.org/@types/node/-/node-18.11.9.tgz" integrity sha512-CRpX21/kGdzjOpFsZSkcrXMGIBWMGNIHXXBVFSH+ggkftxg+XYP20TESbh+zFvFj3EQOl5byk0HTRn1IL6hbqg== "@types/node@>=16": version "18.7.23" - resolved "https://registry.yarnpkg.com/@types/node/-/node-18.7.23.tgz#75c580983846181ebe5f4abc40fe9dfb2d65665f" + resolved "https://registry.npmjs.org/@types/node/-/node-18.7.23.tgz" integrity sha512-DWNcCHolDq0ZKGizjx2DZjR/PqsYwAcYUJmfMWqtVU2MBMG5Mo+xFZrhGId5r/O5HOuMPyQEcM6KUBp5lBZZBg== "@types/prop-types@*": version "15.7.5" - resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.5.tgz#5f19d2b85a98e9558036f6a3cacc8819420f05cf" - integrity sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w== + resolved "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.5.tgz" + integrity "sha1-XxnSuFqY6VWANvajysyIGUIPBc8= sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==" "@types/react@^17.0.52": - version "17.0.58" - resolved "https://registry.yarnpkg.com/@types/react/-/react-17.0.58.tgz#c8bbc82114e5c29001548ebe8ed6c4ba4d3c9fb0" - integrity sha512-c1GzVY97P0fGxwGxhYq989j4XwlcHQoto6wQISOC2v6wm3h0PORRWJFHlkRjfGsiG3y1609WdQ+J+tKxvrEd6A== + version "17.0.71" + resolved "https://registry.yarnpkg.com/@types/react/-/react-17.0.71.tgz#3673d446ad482b1564e44bf853b3ab5bcbc942c4" + integrity "sha1-NnPURq1IKxVk5Ev4U7OrW8vJQsQ= sha512-lfqOu9mp16nmaGRrS8deS2Taqhd5Ih0o92Te5Ws6I1py4ytHBcXLqh0YIqVsViqwVI5f+haiFM6hju814BzcmA==" dependencies: "@types/prop-types" "*" "@types/scheduler" "*" csstype "^3.0.2" "@types/scheduler@*": - version "0.16.2" - resolved "https://registry.yarnpkg.com/@types/scheduler/-/scheduler-0.16.2.tgz#1a62f89525723dde24ba1b01b092bf5df8ad4d39" - integrity sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew== + version "0.16.8" + resolved "https://registry.yarnpkg.com/@types/scheduler/-/scheduler-0.16.8.tgz#ce5ace04cfeabe7ef87c0091e50752e36707deff" + integrity "sha1-zlrOBM/qvn74fACR5QdS42cH3v8= sha512-WZLiwShhwLRmeV6zH+GkbOFT6Z6VklCItrDioxUnv+u4Ll+8vKeFySoFyK/0ctcRpOmwAicELfmys1sDc/Rw+A==" "@types/yoga-layout@1.9.2": version "1.9.2" - resolved "https://registry.yarnpkg.com/@types/yoga-layout/-/yoga-layout-1.9.2.tgz#efaf9e991a7390dc081a0b679185979a83a9639a" + resolved "https://registry.npmjs.org/@types/yoga-layout/-/yoga-layout-1.9.2.tgz" integrity sha512-S9q47ByT2pPvD65IvrWp7qppVMpk9WGMbVq9wbWZOHg6tnXSD4vyhao6nOSBwwfDdV2p3Kx9evA9vI+XWTfDvw== "@ungap/promise-all-settled@1.1.2": version "1.1.2" - resolved "https://registry.yarnpkg.com/@ungap/promise-all-settled/-/promise-all-settled-1.1.2.tgz#aa58042711d6e3275dd37dc597e5d31e8c290a44" + resolved "https://registry.npmjs.org/@ungap/promise-all-settled/-/promise-all-settled-1.1.2.tgz" integrity sha512-sL/cEvJWAnClXw0wHk85/2L0G6Sj8UB0Ctc1TEMbKSsmpRosqhwj9gWgFRZSrBr2f9tiXISwNhCPmlfqUqyb9Q== accepts@~1.3.8: version "1.3.8" - resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e" + resolved "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz" integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw== dependencies: mime-types "~2.1.34" @@ -839,27 +885,27 @@ accepts@~1.3.8: acorn-import-assertions@^1.9.0: version "1.9.0" - resolved 
"https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz#507276249d684797c84e0734ef84860334cfb1ac" + resolved "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz" integrity sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA== acorn-jsx@^5.3.2: version "5.3.2" - resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" + resolved "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz" integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== acorn@^8.8.0: version "8.8.0" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.8.0.tgz#88c0187620435c7f6015803f5539dae05a9dbea8" + resolved "https://registry.npmjs.org/acorn/-/acorn-8.8.0.tgz" integrity sha512-QOxyigPVrpZ2GXT+PFyZTl6TtOFc5egxHIP9IlQ+RbupQuX4RkT/Bee4/kQuC02Xkzg84JcT7oLYtDIQxp+v7w== acorn@^8.8.2: version "8.10.0" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.10.0.tgz#8be5b3907a67221a81ab23c7889c4c5526b62ec5" - integrity sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw== + resolved "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz" + integrity "sha1-i+WzkHpnIhqBqyPHiJxMVSa2LsU= sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==" aggregate-error@^3.0.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/aggregate-error/-/aggregate-error-3.1.0.tgz#92670ff50f5359bdb7a3e0d40d0ec30c5737687a" + resolved "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz" integrity sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA== dependencies: clean-stack "^2.0.0" @@ -867,7 +913,7 @@ aggregate-error@^3.0.0: ajv@^6.10.0, ajv@^6.12.4: version "6.12.6" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + resolved "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz" integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== dependencies: fast-deep-equal "^3.1.1" @@ -877,48 +923,48 @@ ajv@^6.10.0, ajv@^6.12.4: ansi-colors@4.1.1: version "4.1.1" - resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.1.tgz#cbb9ae256bf750af1eab344f229aa27fe94ba348" + resolved "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz" integrity sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA== ansi-escapes@^4.2.1: version "4.3.2" - resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" + resolved "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz" integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== dependencies: type-fest "^0.21.3" ansi-regex@^3.0.0: version "3.0.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.1.tgz#123d6479e92ad45ad897d4054e3c7ca7db4944e1" + resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.1.tgz" integrity sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw== ansi-regex@^5.0.1: version "5.0.1" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" + resolved "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz" integrity 
sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== ansi-styles@^3.2.1: version "3.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" + resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz" integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== dependencies: color-convert "^1.9.0" ansi-styles@^4.0.0, ansi-styles@^4.1.0: version "4.3.0" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" + resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz" integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== dependencies: color-convert "^2.0.1" ansicolors@~0.3.2: version "0.3.2" - resolved "https://registry.yarnpkg.com/ansicolors/-/ansicolors-0.3.2.tgz#665597de86a9ffe3aa9bfbe6cae5c6ea426b4979" + resolved "https://registry.npmjs.org/ansicolors/-/ansicolors-0.3.2.tgz" integrity sha512-QXu7BPrP29VllRxH8GwB7x5iX5qWKAAMLqKQGWTeLWVlNHNOpVMJ91dsxQAIWXpjuW5wqvxu3Jd/nRjrJ+0pqg== anymatch@~3.1.1: version "3.1.2" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" + resolved "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz" integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== dependencies: normalize-path "^3.0.0" @@ -926,7 +972,7 @@ anymatch@~3.1.1: anymatch@~3.1.2: version "3.1.3" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.3.tgz#790c58b19ba1720a84205b57c618d5ad8524973e" + resolved "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz" integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw== dependencies: normalize-path "^3.0.0" @@ -934,49 +980,49 @@ anymatch@~3.1.2: append-field@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/append-field/-/append-field-1.0.0.tgz#1e3440e915f0b1203d23748e78edd7b9b5b43e56" + resolved "https://registry.npmjs.org/append-field/-/append-field-1.0.0.tgz" integrity sha512-klpgFSWLW1ZEs8svjfb7g4qWY0YS5imI82dTg+QahUvJ8YqAY0P10Uk8tTyh9ZGuYEZEMaeJYCF5BFuX552hsw== append-transform@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/append-transform/-/append-transform-2.0.0.tgz#99d9d29c7b38391e6f428d28ce136551f0b77e12" + resolved "https://registry.npmjs.org/append-transform/-/append-transform-2.0.0.tgz" integrity sha512-7yeyCEurROLQJFv5Xj4lEGTy0borxepjFv1g22oAdqFu//SrAlDl1O1Nxx15SH1RoliUml6p8dwJW9jvZughhg== dependencies: default-require-extensions "^3.0.0" archy@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/archy/-/archy-1.0.0.tgz#f9c8c13757cc1dd7bc379ac77b2c62a5c2868c40" + resolved "https://registry.npmjs.org/archy/-/archy-1.0.0.tgz" integrity sha512-Xg+9RwCg/0p32teKdGMPTPnVXKD0w3DfHnFTficozsAgsvq2XenPJq/MYpzzQ/v8zrOyJn6Ds39VA4JIDwFfqw== argparse@^1.0.7: version "1.0.10" - resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" + resolved "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz" integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== dependencies: sprintf-js "~1.0.2" argparse@^2.0.1: version "2.0.1" - resolved 
"https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" + resolved "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz" integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== array-buffer-byte-length@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz#fabe8bc193fea865f317fe7807085ee0dee5aead" - integrity sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A== + resolved "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz" + integrity "sha1-+r6LwZP+qGXzF/54Bwhe4N7lrq0= sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A==" dependencies: call-bind "^1.0.2" is-array-buffer "^3.0.1" array-flatten@1.1.1: version "1.1.1" - resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" + resolved "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz" integrity sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg== array-includes@^3.1.4: version "3.1.5" - resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.5.tgz#2c320010db8d31031fd2a5f6b3bbd4b1aad31bdb" + resolved "https://registry.npmjs.org/array-includes/-/array-includes-3.1.5.tgz" integrity sha512-iSDYZMMyTPkiFasVqfuAQnWAYcvO/SeBSCGKePoEthjp4LEMTe4uLc7b025o4jAZpHhihh8xPo99TNWUWWkGDQ== dependencies: call-bind "^1.0.2" @@ -987,13 +1033,13 @@ array-includes@^3.1.4: array-union@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" + resolved "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz" integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== array.prototype.every@^1.1.4: version "1.1.4" - resolved "https://registry.yarnpkg.com/array.prototype.every/-/array.prototype.every-1.1.4.tgz#2762daecd9cec87cb63f3ca6be576817074a684e" - integrity sha512-Aui35iRZk1HHLRAyF7QP0KAnOnduaQ6fo6k1NVWfRc0xTs2AZ70ytlXvOmkC6Di4JmUs2Wv3DYzGtCQFSk5uGg== + resolved "https://registry.npmjs.org/array.prototype.every/-/array.prototype.every-1.1.4.tgz" + integrity "sha1-J2La7NnOyHy2PzymvldoFwdKaE4= sha512-Aui35iRZk1HHLRAyF7QP0KAnOnduaQ6fo6k1NVWfRc0xTs2AZ70ytlXvOmkC6Di4JmUs2Wv3DYzGtCQFSk5uGg==" dependencies: call-bind "^1.0.2" define-properties "^1.1.4" @@ -1002,7 +1048,7 @@ array.prototype.every@^1.1.4: array.prototype.flat@^1.2.5: version "1.3.0" - resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.3.0.tgz#0b0c1567bf57b38b56b4c97b8aa72ab45e4adc7b" + resolved "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.0.tgz" integrity sha512-12IUEkHsAhA4DY5s0FPgNXIdc8VRSqD9Zp78a5au9abH/SOBrsp082JOWFNTjkMozh8mqcdiKuaLGhPeYztxSw== dependencies: call-bind "^1.0.2" @@ -1012,8 +1058,8 @@ array.prototype.flat@^1.2.5: arraybuffer.prototype.slice@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.1.tgz#9b5ea3868a6eebc30273da577eb888381c0044bb" - integrity sha512-09x0ZWFEjj4WD8PDbykUwo3t9arLn8NIzmmYEJFpYekOAQjpkGSyrQhNoRTcwwcFRu+ycWF78QZ63oWTqSjBcw== + resolved "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.1.tgz" + integrity 
"sha1-m16jhopu68MCc9pXfriIOBwARLs= sha512-09x0ZWFEjj4WD8PDbykUwo3t9arLn8NIzmmYEJFpYekOAQjpkGSyrQhNoRTcwwcFRu+ycWF78QZ63oWTqSjBcw==" dependencies: array-buffer-byte-length "^1.0.0" call-bind "^1.0.2" @@ -1024,32 +1070,32 @@ arraybuffer.prototype.slice@^1.0.1: assertion-error@^1.1.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/assertion-error/-/assertion-error-1.1.0.tgz#e60b6b0e8f301bd97e5375215bda406c85118c0b" + resolved "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz" integrity sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw== astral-regex@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-2.0.0.tgz#483143c567aeed4785759c0865786dc77d7d2e31" + resolved "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz" integrity sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ== async-hook-domain@^2.0.4: version "2.0.4" - resolved "https://registry.yarnpkg.com/async-hook-domain/-/async-hook-domain-2.0.4.tgz#5a24910982c04394ea33dd442860f80cce2d972c" + resolved "https://registry.npmjs.org/async-hook-domain/-/async-hook-domain-2.0.4.tgz" integrity sha512-14LjCmlK1PK8eDtTezR6WX8TMaYNIzBIsd2D1sGoGjgx0BuNMMoSdk7i/drlbtamy0AWv9yv2tkB+ASdmeqFIw== asynckit@^0.4.0: version "0.4.0" - resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" + resolved "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz" integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q== auto-bind@4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/auto-bind/-/auto-bind-4.0.0.tgz#e3589fc6c2da8f7ca43ba9f84fa52a744fc997fb" + resolved "https://registry.npmjs.org/auto-bind/-/auto-bind-4.0.0.tgz" integrity sha512-Hdw8qdNiqdJ8LqT0iK0sVzkFbzg6fhnQqqfWhBDxcHZvU75+B+ayzTy8x+k5Ix0Y92XOhOUlx74ps+bA6BeYMQ== autocannon@^4.5.2: version "4.6.0" - resolved "https://registry.yarnpkg.com/autocannon/-/autocannon-4.6.0.tgz#01c18e211444bd523c97da4ff7ff83cd25035333" + resolved "https://registry.npmjs.org/autocannon/-/autocannon-4.6.0.tgz" integrity sha512-pWHEBJh9bkQeDXYj1NL2BBYeXTaLkbRiy3NZ7vNR1bq7vWxHP8R+iCmDyBCtuh2PMJiWlGlikXa1p0LUUY3Tdw== dependencies: chalk "^3.0.0" @@ -1074,13 +1120,13 @@ autocannon@^4.5.2: available-typed-arrays@^1.0.5: version "1.0.5" - resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz#92f95616501069d07d10edb2fc37d3e1c65123b7" - integrity sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw== + resolved "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz" + integrity "sha1-kvlWFlAQadB9EO2y/DfT4cZRI7c= sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==" aws-sdk@^2.1446.0: version "2.1477.0" - resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1477.0.tgz#ec878ea5584fee217eb02ec8f6ebfd9ace47f908" - integrity sha512-DLsrKosrKRe5P1E+BcJAVpOXkma4oUOrcyBUridDmUhdf9k3jj5dnL1roFuDpTmNDDhK8a1tUgY3wmXoKQtv7A== + resolved "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1477.0.tgz" + integrity "sha1-7IeOpVhP7iF+sC7I9uv9ms5H+Qg= sha512-DLsrKosrKRe5P1E+BcJAVpOXkma4oUOrcyBUridDmUhdf9k3jj5dnL1roFuDpTmNDDhK8a1tUgY3wmXoKQtv7A==" dependencies: buffer "4.9.2" events "1.1.1" @@ -1095,24 +1141,24 @@ aws-sdk@^2.1446.0: axios@^0.21.2: version "0.21.4" - resolved 
"https://registry.yarnpkg.com/axios/-/axios-0.21.4.tgz#c67b90dc0568e5c1cf2b0b858c43ba28e2eda575" + resolved "https://registry.npmjs.org/axios/-/axios-0.21.4.tgz" integrity sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg== dependencies: follow-redirects "^1.14.0" balanced-match@^1.0.0: version "1.0.2" - resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" + resolved "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz" integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== base64-js@^1.0.2, base64-js@^1.2.0: version "1.5.1" - resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" + resolved "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz" integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== benchmark@^2.1.4: version "2.1.4" - resolved "https://registry.yarnpkg.com/benchmark/-/benchmark-2.1.4.tgz#09f3de31c916425d498cc2ee565a0ebf3c2a5629" + resolved "https://registry.npmjs.org/benchmark/-/benchmark-2.1.4.tgz" integrity sha512-l9MlfN4M1K/H2fbhfMy3B7vJd6AGKJVQn2h6Sg/Yx+KckoUA7ewS5Vv6TjSq18ooE1kS9hhAlQRH3AkXIh/aOQ== dependencies: lodash "^4.17.4" @@ -1120,18 +1166,18 @@ benchmark@^2.1.4: binary-extensions@^2.0.0: version "2.2.0" - resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" + resolved "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz" integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== bind-obj-methods@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/bind-obj-methods/-/bind-obj-methods-3.0.0.tgz#65b66544d9d668d80dfefe2089dd347ad1dbcaed" + resolved "https://registry.npmjs.org/bind-obj-methods/-/bind-obj-methods-3.0.0.tgz" integrity sha512-nLEaaz3/sEzNSyPWRsN9HNsqwk1AUyECtGj+XwGdIi3xABnEqecvXtIJ0wehQXuuER5uZ/5fTs2usONgYjG+iw== body-parser@1.20.1: version "1.20.1" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.1.tgz#b1812a8912c195cd371a3ee5e66faa2338a5c668" - integrity sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw== + resolved "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz" + integrity "sha1-sYEqiRLBlc03Gj7l5m+qIzilxmg= sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==" dependencies: bytes "3.1.2" content-type "~1.0.4" @@ -1148,8 +1194,8 @@ body-parser@1.20.1: body-parser@^1.20.2: version "1.20.2" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.2.tgz#6feb0e21c4724d06de7ff38da36dad4f57a747fd" - integrity sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA== + resolved "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz" + integrity "sha1-b+sOIcRyTQbef/ONo22tT1enR/0= sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==" dependencies: bytes "3.1.2" content-type "~1.0.5" @@ -1166,7 +1212,7 @@ body-parser@^1.20.2: brace-expansion@^1.1.7: version "1.1.11" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" + resolved "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz" 
integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== dependencies: balanced-match "^1.0.0" @@ -1174,19 +1220,19 @@ brace-expansion@^1.1.7: braces@^3.0.2, braces@~3.0.2: version "3.0.2" - resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" + resolved "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz" integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== dependencies: fill-range "^7.0.1" browser-stdout@1.3.1: version "1.3.1" - resolved "https://registry.yarnpkg.com/browser-stdout/-/browser-stdout-1.3.1.tgz#baa559ee14ced73452229bad7326467c61fabd60" + resolved "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz" integrity sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw== browserslist@^4.21.3: version "4.21.4" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.21.4.tgz#e7496bbc67b9e39dd0f98565feccdcb0d4ff6987" + resolved "https://registry.npmjs.org/browserslist/-/browserslist-4.21.4.tgz" integrity sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw== dependencies: caniuse-lite "^1.0.30001400" @@ -1194,15 +1240,25 @@ browserslist@^4.21.3: node-releases "^2.0.6" update-browserslist-db "^1.0.9" +browserslist@^4.21.9: + version "4.22.2" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.22.2.tgz#704c4943072bd81ea18997f3bd2180e89c77874b" + integrity "sha1-cExJQwcr2B6hiZfzvSGA6Jx3h0s= sha512-0UgcrvQmBDvZHFGdYUehrCNIazki7/lUP3kkoi/r3YB2amZbFM9J43ZRkJTXBUZK4gmx56+Sqk9+Vs9mwZx9+A==" + dependencies: + caniuse-lite "^1.0.30001565" + electron-to-chromium "^1.4.601" + node-releases "^2.0.14" + update-browserslist-db "^1.0.13" + buffer-from@^1.0.0: version "1.1.2" - resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" + resolved "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz" integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== buffer@4.9.2: version "4.9.2" - resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8" - integrity sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg== + resolved "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz" + integrity "sha1-Iw6tNEACmIZEhBqwJEr4xEu+Pvg= sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==" dependencies: base64-js "^1.0.2" ieee754 "^1.1.4" @@ -1210,26 +1266,26 @@ buffer@4.9.2: builtins@^5.0.1: version "5.0.1" - resolved "https://registry.yarnpkg.com/builtins/-/builtins-5.0.1.tgz#87f6db9ab0458be728564fa81d876d8d74552fa9" + resolved "https://registry.npmjs.org/builtins/-/builtins-5.0.1.tgz" integrity sha512-qwVpFEHNfhYJIzNRBvd2C1kyo6jz3ZSMPyyuR47OPdiKWlbYnZNyDWuyR175qDnAJLiCo5fBBqPb3RiXgWlkOQ== dependencies: semver "^7.0.0" busboy@^1.0.0: version "1.6.0" - resolved "https://registry.yarnpkg.com/busboy/-/busboy-1.6.0.tgz#966ea36a9502e43cdb9146962523b92f531f6893" + resolved "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz" integrity sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA== dependencies: streamsearch "^1.1.0" bytes@3.1.2: version "3.1.2" - resolved 
"https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" + resolved "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz" integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg== caching-transform@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/caching-transform/-/caching-transform-4.0.0.tgz#00d297a4206d71e2163c39eaffa8157ac0651f0f" + resolved "https://registry.npmjs.org/caching-transform/-/caching-transform-4.0.0.tgz" integrity sha512-kpqOvwXnjjN44D89K5ccQC+RUrsy7jB/XLlRrx0D7/2HNcTPqzsb6XgYoErwko6QsV184CA2YgS1fxDiiDZMWA== dependencies: hasha "^5.0.0" @@ -1239,7 +1295,7 @@ caching-transform@^4.0.0: call-bind@^1.0.0, call-bind@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" + resolved "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz" integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== dependencies: function-bind "^1.1.1" @@ -1247,8 +1303,8 @@ call-bind@^1.0.0, call-bind@^1.0.2: call-bind@^1.0.4: version "1.0.5" - resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.5.tgz#6fa2b7845ce0ea49bf4d8b9ef64727a2c2e2e513" - integrity sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ== + resolved "https://registry.npmjs.org/call-bind/-/call-bind-1.0.5.tgz" + integrity "sha1-b6K3hFzg6km/TYue9kcnosLi5RM= sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ==" dependencies: function-bind "^1.1.2" get-intrinsic "^1.2.1" @@ -1256,41 +1312,46 @@ call-bind@^1.0.4: caller-callsite@^4.1.0: version "4.1.0" - resolved "https://registry.yarnpkg.com/caller-callsite/-/caller-callsite-4.1.0.tgz#3e33cb1d910e7b09332d59a3503b9af7462f7295" + resolved "https://registry.npmjs.org/caller-callsite/-/caller-callsite-4.1.0.tgz" integrity sha512-99nnnGlJexTc41xwQTr+mWl15OI5PPczUJzM4YRE7QjkefMKCXGa5gfQjCOuVrD+1TjI/fevIDHg2nz3iYN5Ig== dependencies: callsites "^3.1.0" caller-path@^3.0.1: version "3.0.1" - resolved "https://registry.yarnpkg.com/caller-path/-/caller-path-3.0.1.tgz#bc932ecec3f943e10c2f8922146e23b132f932e4" + resolved "https://registry.npmjs.org/caller-path/-/caller-path-3.0.1.tgz" integrity sha512-fhmztL4wURO/BzwJUJ4aVRdnKEFskPBbrJ8fNgl7XdUiD1ygzzlt+nhPgUBSRq2ciEVubo6x+W8vJQzm55QLLQ== dependencies: caller-callsite "^4.1.0" callsites@^3.0.0, callsites@^3.1.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + resolved "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz" integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== camelcase@^5.0.0, camelcase@^5.3.1: version "5.3.1" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" + resolved "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz" integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== camelcase@^6.0.0: version "6.3.0" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-6.3.0.tgz#5685b95eb209ac9c0c177467778c9c84df58ba9a" + resolved "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz" integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== caniuse-lite@^1.0.30001400: version 
"1.0.30001412" - resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001412.tgz#30f67d55a865da43e0aeec003f073ea8764d5d7c" + resolved "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001412.tgz" integrity sha512-+TeEIee1gS5bYOiuf+PS/kp2mrXic37Hl66VY6EAfxasIk5fELTktK2oOezYed12H8w7jt3s512PpulQidPjwA== +caniuse-lite@^1.0.30001565: + version "1.0.30001566" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001566.tgz#61a8e17caf3752e3e426d4239c549ebbb37fef0d" + integrity "sha1-YajhfK83UuPkJtQjnFSeu7N/7w0= sha512-ggIhCsTxmITBAMmK8yZjEhCO5/47jKXPu6Dha/wuCS4JePVL+3uiDEBuhu2aIoT+bqTOR8L76Ip1ARL9xYsEJA==" + cardinal@^2.1.1: version "2.1.1" - resolved "https://registry.yarnpkg.com/cardinal/-/cardinal-2.1.1.tgz#7cc1055d822d212954d07b085dea251cc7bc5505" + resolved "https://registry.npmjs.org/cardinal/-/cardinal-2.1.1.tgz" integrity sha512-JSr5eOgoEymtYHBjNWyjrMqet9Am2miJhlfKNdqLp6zoeAh0KN5dRAcxlecj5mAJrmQomgiOBj35xHLrFjqBpw== dependencies: ansicolors "~0.3.2" @@ -1298,8 +1359,8 @@ cardinal@^2.1.1: chai@^4.3.7: version "4.3.7" - resolved "https://registry.yarnpkg.com/chai/-/chai-4.3.7.tgz#ec63f6df01829088e8bf55fca839bcd464a8ec51" - integrity sha512-HLnAzZ2iupm25PlN0xFreAlBA5zaBSv3og0DdeGA4Ar6h6rJ3A0rolRUKJhSF2V10GZKDgWF/VmAEsNWjCRB+A== + resolved "https://registry.npmjs.org/chai/-/chai-4.3.7.tgz" + integrity "sha1-7GP23wGCkIjov1X8qDm81GSo7FE= sha512-HLnAzZ2iupm25PlN0xFreAlBA5zaBSv3og0DdeGA4Ar6h6rJ3A0rolRUKJhSF2V10GZKDgWF/VmAEsNWjCRB+A==" dependencies: assertion-error "^1.1.0" check-error "^1.0.2" @@ -1309,9 +1370,9 @@ chai@^4.3.7: pathval "^1.1.1" type-detect "^4.0.5" -chalk@^2.0.0: +chalk@^2.0.0, chalk@^2.4.2: version "2.4.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" + resolved "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== dependencies: ansi-styles "^3.2.1" @@ -1320,7 +1381,7 @@ chalk@^2.0.0: chalk@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-3.0.0.tgz#3f73c2bf526591f574cc492c51e2456349f844e4" + resolved "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz" integrity sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg== dependencies: ansi-styles "^4.1.0" @@ -1328,7 +1389,7 @@ chalk@^3.0.0: chalk@^4.0.0, chalk@^4.1.0: version "4.1.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" + resolved "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz" integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== dependencies: ansi-styles "^4.1.0" @@ -1336,24 +1397,24 @@ chalk@^4.0.0, chalk@^4.1.0: chalk@^5.3.0: version "5.3.0" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.3.0.tgz#67c20a7ebef70e7f3970a01f90fa210cb6860385" - integrity sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w== + resolved "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz" + integrity "sha1-Z8IKfr73Dn85cKAfkPohDLaGA4U= sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==" check-error@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/check-error/-/check-error-1.0.2.tgz#574d312edd88bb5dd8912e9286dd6c0aed4aac82" + resolved "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz" integrity 
sha512-BrgHpW9NURQgzoNyjfq0Wu6VFO6D7IZEmJNdtgNqpzGG8RuNFHt2jQxWlAs4HMe119chBnv+34syEZtc6IhLtA== checksum@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/checksum/-/checksum-1.0.0.tgz#39d9b5eef273a6a53203900f6780ee8881ab39e5" - integrity sha512-68bHejnM/sBQhjXcXd2mFusICnqAwikZ9RVMURIacWh7moNjgOdHKimS6yk30Np/PwfR00dceY4b1GwWanu5cg== + resolved "https://registry.npmjs.org/checksum/-/checksum-1.0.0.tgz" + integrity "sha1-Odm17vJzpqUyA5APZ4DuiIGrOeU= sha512-68bHejnM/sBQhjXcXd2mFusICnqAwikZ9RVMURIacWh7moNjgOdHKimS6yk30Np/PwfR00dceY4b1GwWanu5cg==" dependencies: optimist "~0.3.5" chokidar@3.5.1: version "3.5.1" - resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.1.tgz#ee9ce7bbebd2b79f49f304799d5468e31e14e68a" + resolved "https://registry.npmjs.org/chokidar/-/chokidar-3.5.1.tgz" integrity sha512-9+s+Od+W0VJJzawDma/gvBNQqkTiqYTWLuZoyAsivsI4AaWTCzHG06/TMjsf1cYe9Cb97UCEhjz7HvnPk2p/tw== dependencies: anymatch "~3.1.1" @@ -1368,7 +1429,7 @@ chokidar@3.5.1: chokidar@^3.3.0: version "3.5.3" - resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd" + resolved "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz" integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw== dependencies: anymatch "~3.1.2" @@ -1383,34 +1444,34 @@ chokidar@^3.3.0: ci-info@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46" + resolved "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz" integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ== cjs-module-lexer@^1.2.2: version "1.2.3" - resolved "https://registry.yarnpkg.com/cjs-module-lexer/-/cjs-module-lexer-1.2.3.tgz#6c370ab19f8a3394e318fe682686ec0ac684d107" - integrity sha512-0TNiGstbQmCFwt4akjjBg5pLRTSyj/PkWQ1ZoO2zntmg9yLqSRxwEa4iCfQLGjqhiqBfOJa7W/E8wfGrTDmlZQ== + resolved "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.2.3.tgz" + integrity "sha1-bDcKsZ+KM5TjGP5oJobsCsaE0Qc= sha512-0TNiGstbQmCFwt4akjjBg5pLRTSyj/PkWQ1ZoO2zntmg9yLqSRxwEa4iCfQLGjqhiqBfOJa7W/E8wfGrTDmlZQ==" clean-stack@^2.0.0: version "2.2.0" - resolved "https://registry.yarnpkg.com/clean-stack/-/clean-stack-2.2.0.tgz#ee8472dbb129e727b31e8a10a427dee9dfe4008b" + resolved "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz" integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== cli-boxes@^2.2.0: version "2.2.1" - resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-2.2.1.tgz#ddd5035d25094fce220e9cab40a45840a440318f" + resolved "https://registry.npmjs.org/cli-boxes/-/cli-boxes-2.2.1.tgz" integrity sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw== cli-cursor@^3.1.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-3.1.0.tgz#264305a7ae490d1d03bf0c9ba7c925d1753af307" + resolved "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz" integrity sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw== dependencies: restore-cursor "^3.1.0" cli-table3@^0.5.1: version "0.5.1" - resolved "https://registry.yarnpkg.com/cli-table3/-/cli-table3-0.5.1.tgz#0252372d94dfc40dbd8df06005f48f31f656f202" + resolved "https://registry.npmjs.org/cli-table3/-/cli-table3-0.5.1.tgz" integrity 
sha512-7Qg2Jrep1S/+Q3EceiZtQcDPWxhAvBw+ERf1162v4sikJrvojMHFqXt8QIVha8UlH9rgU0BeWPytZ9/TzYqlUw== dependencies: object-assign "^4.1.0" @@ -1420,8 +1481,8 @@ cli-table3@^0.5.1: cli-table3@^0.6.3: version "0.6.3" - resolved "https://registry.yarnpkg.com/cli-table3/-/cli-table3-0.6.3.tgz#61ab765aac156b52f222954ffc607a6f01dbeeb2" - integrity sha512-w5Jac5SykAeZJKntOxJCrm63Eg5/4dhMWIcuTbo9rpE+brgaSZo0RuNJZeOyMgsUdhDeojvgyQLmjI+K50ZGyg== + resolved "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.3.tgz" + integrity "sha1-Yat2WqwVa1LyIpVP/GB6bwHb7rI= sha512-w5Jac5SykAeZJKntOxJCrm63Eg5/4dhMWIcuTbo9rpE+brgaSZo0RuNJZeOyMgsUdhDeojvgyQLmjI+K50ZGyg==" dependencies: string-width "^4.2.0" optionalDependencies: @@ -1429,7 +1490,7 @@ cli-table3@^0.6.3: cli-truncate@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/cli-truncate/-/cli-truncate-2.1.0.tgz#c39e28bf05edcde5be3b98992a22deed5a2b93c7" + resolved "https://registry.npmjs.org/cli-truncate/-/cli-truncate-2.1.0.tgz" integrity sha512-n8fOixwDD6b/ObinzTrp1ZKFzbgvKZvuz/TvejnLn1aQfC6r52XEx85FmuC+3HI+JM7coBRXUvNqEU2PHVrHpg== dependencies: slice-ansi "^3.0.0" @@ -1437,7 +1498,7 @@ cli-truncate@^2.1.0: cliui@^6.0.0: version "6.0.0" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1" + resolved "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz" integrity sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ== dependencies: string-width "^4.2.0" @@ -1446,7 +1507,7 @@ cliui@^6.0.0: cliui@^7.0.2, cliui@^7.0.4: version "7.0.4" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" + resolved "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz" integrity sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ== dependencies: string-width "^4.2.0" @@ -1455,80 +1516,80 @@ cliui@^7.0.2, cliui@^7.0.4: clone@^2.1.2: version "2.1.2" - resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f" + resolved "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz" integrity sha512-3Pe/CF1Nn94hyhIYpjtiLhdCoEoz0DqQ+988E9gmeEdQZlojxnOb74wctFyuwWQHzqyf9X7C7MG8juUpqBJT8w== code-excerpt@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/code-excerpt/-/code-excerpt-3.0.0.tgz#fcfb6748c03dba8431c19f5474747fad3f250f10" + resolved "https://registry.npmjs.org/code-excerpt/-/code-excerpt-3.0.0.tgz" integrity sha512-VHNTVhd7KsLGOqfX3SyeO8RyYPMp1GJOg194VITk04WMYCv4plV68YWe6TJZxd9MhobjtpMRnVky01gqZsalaw== dependencies: convert-to-spaces "^1.0.1" color-convert@^1.9.0: version "1.9.3" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" + resolved "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz" integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== dependencies: color-name "1.1.3" color-convert@^2.0.1: version "2.0.1" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" + resolved "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz" integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== dependencies: color-name "~1.1.4" color-name@1.1.3: version "1.1.3" - resolved 
"https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" + resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz" integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== color-name@~1.1.4: version "1.1.4" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" + resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz" integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== color-support@^1.1.0, color-support@^1.1.1: version "1.1.3" - resolved "https://registry.yarnpkg.com/color-support/-/color-support-1.1.3.tgz#93834379a1cc9a0c61f82f52f0d04322251bd5a2" + resolved "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz" integrity sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg== colorette@2.0.19: version "2.0.19" - resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.19.tgz#cdf044f47ad41a0f4b56b3a0d5b4e6e1a2d5a798" + resolved "https://registry.npmjs.org/colorette/-/colorette-2.0.19.tgz" integrity sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ== colors@^1.1.2: version "1.4.0" - resolved "https://registry.yarnpkg.com/colors/-/colors-1.4.0.tgz#c50491479d4c1bdaed2c9ced32cf7c7dc2360f78" + resolved "https://registry.npmjs.org/colors/-/colors-1.4.0.tgz" integrity sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA== combined-stream@^1.0.6: version "1.0.8" - resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" + resolved "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz" integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== dependencies: delayed-stream "~1.0.0" commander@^9.1.0: version "9.5.0" - resolved "https://registry.yarnpkg.com/commander/-/commander-9.5.0.tgz#bc08d1eb5cedf7ccb797a96199d41c7bc3e60d30" + resolved "https://registry.npmjs.org/commander/-/commander-9.5.0.tgz" integrity sha512-KRs7WVDKg86PWiuAqhDrAQnTXZKraVcCc6vFdL14qrZ/DcWwuRo7VoiYXalXO7S5GKpqYiVEwCbgFDfxNHKJBQ== commondir@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" + resolved "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz" integrity sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg== concat-map@0.0.1: version "0.0.1" - resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" + resolved "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz" integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg== concat-stream@^1.5.2: version "1.6.2" - resolved "https://registry.yarnpkg.com/concat-stream/-/concat-stream-1.6.2.tgz#904bdf194cd3122fc675c77fc4ac3d4ff0fd1a34" + resolved "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz" integrity sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw== dependencies: buffer-from "^1.0.0" @@ -1538,56 +1599,61 @@ concat-stream@^1.5.2: content-disposition@0.5.4: version "0.5.4" - resolved 
"https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe" + resolved "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz" integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ== dependencies: safe-buffer "5.2.1" content-type@~1.0.4: version "1.0.4" - resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" + resolved "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz" integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== content-type@~1.0.5: version "1.0.5" - resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.5.tgz#8b773162656d1d1086784c8f23a54ce6d73d7918" - integrity sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA== + resolved "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz" + integrity "sha1-i3cxYmVtHRCGeEyPI6VM5tc9eRg= sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==" convert-source-map@^1.7.0: version "1.8.0" - resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" + resolved "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.8.0.tgz" integrity sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA== dependencies: safe-buffer "~5.1.1" +convert-source-map@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-2.0.0.tgz#4b560f649fc4e918dd0ab75cf4961e8bc882d82a" + integrity sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg== + convert-to-spaces@^1.0.1: version "1.0.2" - resolved "https://registry.yarnpkg.com/convert-to-spaces/-/convert-to-spaces-1.0.2.tgz#7e3e48bbe6d997b1417ddca2868204b4d3d85715" + resolved "https://registry.npmjs.org/convert-to-spaces/-/convert-to-spaces-1.0.2.tgz" integrity sha512-cj09EBuObp9gZNQCzc7hByQyrs6jVGE+o9kSJmeUoj+GiPiJvi5LYqEH/Hmme4+MTLHM+Ejtq+FChpjjEnsPdQ== cookie-signature@1.0.6: version "1.0.6" - resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" + resolved "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz" integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ== cookie@0.5.0: version "0.5.0" - resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b" + resolved "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz" integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== core-util-is@~1.0.0: version "1.0.3" - resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.3.tgz#a6042d3634c2b27e9328f837b965fac83808db85" + resolved "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz" integrity sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== cross-argv@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/cross-argv/-/cross-argv-1.0.0.tgz#e7221e9ff73092a80496c699c8c45efb20f6486c" + resolved "https://registry.npmjs.org/cross-argv/-/cross-argv-1.0.0.tgz" integrity 
sha512-uAVe/bgNHlPdP1VE4Sk08u9pAJ7o1x/tVQtX77T5zlhYhuwOWtVkPBEtHdvF5cq48VzeCG5i1zN4dQc8pwLYrw== cross-spawn@^7.0.0, cross-spawn@^7.0.2, cross-spawn@^7.0.3: version "7.0.3" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + resolved "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz" integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== dependencies: path-key "^3.1.0" @@ -1596,68 +1662,68 @@ cross-spawn@^7.0.0, cross-spawn@^7.0.2, cross-spawn@^7.0.3: crypto-randomuuid@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/crypto-randomuuid/-/crypto-randomuuid-1.0.0.tgz#acf583e5e085e867ae23e107ff70279024f9e9e7" + resolved "https://registry.npmjs.org/crypto-randomuuid/-/crypto-randomuuid-1.0.0.tgz" integrity sha512-/RC5F4l1SCqD/jazwUF6+t34Cd8zTSAGZ7rvvZu1whZUhD2a5MOGKjSGowoGcpj/cbVZk1ZODIooJEQQq3nNAA== csstype@^3.0.2: - version "3.1.1" - resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.1.tgz#841b532c45c758ee546a11d5bd7b7b473c8c30b9" - integrity sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw== + version "3.1.3" + resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.3.tgz#d80ff294d114fb0e6ac500fbf85b60137d7eff81" + integrity "sha1-2A/ylNEU+w5qxQD7+FtgE31+/4E= sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==" dc-polyfill@^0.1.2: version "0.1.2" - resolved "https://registry.yarnpkg.com/dc-polyfill/-/dc-polyfill-0.1.2.tgz#99a2f120759317b9976999aa715183a1c44b1327" - integrity sha512-AJ4TWwkeOKF7+Wj301wdyK8L0D9SE8Fr7+eMein8UP8+Iyb1xuL3rXWXavsTEM1+vOqDLciYho4cpsvNY0RDGQ== + resolved "https://registry.npmjs.org/dc-polyfill/-/dc-polyfill-0.1.2.tgz" + integrity "sha1-maLxIHWTF7mXaZmqcVGDocRLEyc= sha512-AJ4TWwkeOKF7+Wj301wdyK8L0D9SE8Fr7+eMein8UP8+Iyb1xuL3rXWXavsTEM1+vOqDLciYho4cpsvNY0RDGQ==" debug@2.6.9, debug@^2.6.9: version "2.6.9" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" + resolved "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== dependencies: ms "2.0.0" debug@4.3.1: version "4.3.1" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee" + resolved "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz" integrity sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ== dependencies: ms "2.1.2" debug@4.3.4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.2: version "4.3.4" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" + resolved "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz" integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== dependencies: ms "2.1.2" debug@^3.2.7: version "3.2.7" - resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" + resolved "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz" integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== dependencies: ms "^2.1.1" decamelize@^1.2.0: version "1.2.0" - resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" + resolved 
"https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz" integrity sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA== decamelize@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-4.0.0.tgz#aa472d7bf660eb15f3494efd531cab7f2a709837" + resolved "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz" integrity sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ== deep-eql@^4.1.2: version "4.1.3" - resolved "https://registry.yarnpkg.com/deep-eql/-/deep-eql-4.1.3.tgz#7c7775513092f7df98d8df9996dd085eb668cc6d" - integrity sha512-WaEtAOpRA1MQ0eohqZjpGD8zdI0Ovsm8mmFhaDN8dvDZzyoUMcYDnf5Y6iu7HTXxf8JDS23qWa4a+hKCDyOPzw== + resolved "https://registry.npmjs.org/deep-eql/-/deep-eql-4.1.3.tgz" + integrity "sha1-fHd1UTCS99+Y2N+Zlt0IXrZozG0= sha512-WaEtAOpRA1MQ0eohqZjpGD8zdI0Ovsm8mmFhaDN8dvDZzyoUMcYDnf5Y6iu7HTXxf8JDS23qWa4a+hKCDyOPzw==" dependencies: type-detect "^4.0.0" deep-equal@^2.2.2: version "2.2.2" - resolved "https://registry.yarnpkg.com/deep-equal/-/deep-equal-2.2.2.tgz#9b2635da569a13ba8e1cc159c2f744071b115daa" - integrity sha512-xjVyBf0w5vH0I42jdAZzOKVldmPgSulmiyPRywoyq7HXC9qdgo17kxJE+rdnif5Tz6+pIrpJI8dCpMNLIGkUiA== + resolved "https://registry.npmjs.org/deep-equal/-/deep-equal-2.2.2.tgz" + integrity "sha1-myY12laaE7qOHMFZwvdEBxsRXao= sha512-xjVyBf0w5vH0I42jdAZzOKVldmPgSulmiyPRywoyq7HXC9qdgo17kxJE+rdnif5Tz6+pIrpJI8dCpMNLIGkUiA==" dependencies: array-buffer-byte-length "^1.0.0" call-bind "^1.0.2" @@ -1680,20 +1746,20 @@ deep-equal@^2.2.2: deep-is@^0.1.3: version "0.1.4" - resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" + resolved "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz" integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== default-require-extensions@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/default-require-extensions/-/default-require-extensions-3.0.0.tgz#e03f93aac9b2b6443fc52e5e4a37b3ad9ad8df96" + resolved "https://registry.npmjs.org/default-require-extensions/-/default-require-extensions-3.0.0.tgz" integrity sha512-ek6DpXq/SCpvjhpFsLFRVtIxJCRw6fUR42lYMVZuUMK7n8eMz4Uh5clckdBjEpLhn/gEBZo7hDJnJcwdKLKQjg== dependencies: strip-bom "^4.0.0" define-data-property@^1.1.1: version "1.1.1" - resolved "https://registry.yarnpkg.com/define-data-property/-/define-data-property-1.1.1.tgz#c35f7cd0ab09883480d12ac5cb213715587800b3" - integrity sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ== + resolved "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.1.tgz" + integrity "sha1-w1980KsJiDSA0SrFyyE3FVh4ALM= sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ==" dependencies: get-intrinsic "^1.2.1" gopd "^1.0.1" @@ -1701,7 +1767,7 @@ define-data-property@^1.1.1: define-properties@^1.1.3, define-properties@^1.1.4: version "1.1.4" - resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.4.tgz#0b14d7bd7fbeb2f3572c3a7eda80ea5d57fb05b1" + resolved "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz" integrity sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA== dependencies: has-property-descriptors "^1.0.0" @@ -1709,113 +1775,118 @@ define-properties@^1.1.3, define-properties@^1.1.4: define-properties@^1.2.0: version "1.2.0" - 
resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.2.0.tgz#52988570670c9eacedd8064f4a990f2405849bd5" - integrity sha512-xvqAVKGfT1+UAvPwKTVw/njhdQ8ZhXK4lI0bCIuCMrp2up9nPnaDftrLtmpTazqd1o+UY4zgzU+avtMbDP+ldA== + resolved "https://registry.npmjs.org/define-properties/-/define-properties-1.2.0.tgz" + integrity "sha1-UpiFcGcMnqzt2AZPSpkPJAWEm9U= sha512-xvqAVKGfT1+UAvPwKTVw/njhdQ8ZhXK4lI0bCIuCMrp2up9nPnaDftrLtmpTazqd1o+UY4zgzU+avtMbDP+ldA==" dependencies: has-property-descriptors "^1.0.0" object-keys "^1.1.1" defined@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/defined/-/defined-1.0.1.tgz#c0b9db27bfaffd95d6f61399419b893df0f91ebf" - integrity sha512-hsBd2qSVCRE+5PmNdHt1uzyrFu5d3RwmFDKzyNZMFq/EwDNJF7Ee5+D5oEKF0hU6LhtoUF1macFvOe4AskQC1Q== + resolved "https://registry.npmjs.org/defined/-/defined-1.0.1.tgz" + integrity "sha1-wLnbJ7+v/ZXW9hOZQZuJPfD5Hr8= sha512-hsBd2qSVCRE+5PmNdHt1uzyrFu5d3RwmFDKzyNZMFq/EwDNJF7Ee5+D5oEKF0hU6LhtoUF1macFvOe4AskQC1Q==" delay@^5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/delay/-/delay-5.0.0.tgz#137045ef1b96e5071060dd5be60bf9334436bd1d" + resolved "https://registry.npmjs.org/delay/-/delay-5.0.0.tgz" integrity sha512-ReEBKkIfe4ya47wlPYf/gu5ib6yUG0/Aez0JQZQz94kiWtRQvZIQbTiehsnwHvLSWJnQdhVeqYue7Id1dKr0qw== delayed-stream@~1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + resolved "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== depd@2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" + resolved "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz" integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== destroy@1.2.0: version "1.2.0" - resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015" + resolved "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz" integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== detect-newline@^3.0.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" - integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== + resolved "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz" + integrity "sha1-V29d/GOuGhkv8ZLYrTr2MImRtlE= sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==" diff@5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/diff/-/diff-5.0.0.tgz#7ed6ad76d859d030787ec35855f5b1daf31d852b" + resolved "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz" integrity sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w== diff@^4.0.1, diff@^4.0.2: version "4.0.2" - resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" + resolved "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz" integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== diff@^5.1.0: version "5.1.0" - resolved "https://registry.yarnpkg.com/diff/-/diff-5.1.0.tgz#bc52d298c5ea8df9194800224445ed43ffc87e40" + resolved 
"https://registry.npmjs.org/diff/-/diff-5.1.0.tgz" integrity sha512-D+mk+qE8VC/PAUrlAU34N+VfXev0ghe5ywmpqrawphmVZc1bEfn56uo9qpyGp1p4xpzOHkSW4ztBd6L7Xx4ACw== dir-glob@^3.0.1: version "3.0.1" - resolved "https://registry.yarnpkg.com/dir-glob/-/dir-glob-3.0.1.tgz#56dbf73d992a4a93ba1584f4534063fd2e41717f" + resolved "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz" integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== dependencies: path-type "^4.0.0" doctrine@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" + resolved "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz" integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== dependencies: esutils "^2.0.2" doctrine@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" + resolved "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz" integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== dependencies: esutils "^2.0.2" dotenv@16.3.1: version "16.3.1" - resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.3.1.tgz#369034de7d7e5b120972693352a3bf112172cc3e" - integrity sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ== + resolved "https://registry.npmjs.org/dotenv/-/dotenv-16.3.1.tgz" + integrity "sha1-NpA03n1+WxIJcmkzUqO/ESFyzD4= sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ==" dotignore@^0.1.2: version "0.1.2" - resolved "https://registry.yarnpkg.com/dotignore/-/dotignore-0.1.2.tgz#f942f2200d28c3a76fbdd6f0ee9f3257c8a2e905" + resolved "https://registry.npmjs.org/dotignore/-/dotignore-0.1.2.tgz" integrity sha512-UGGGWfSauusaVJC+8fgV+NVvBXkCTmVv7sk6nojDZZvuOUNGUy0Zk4UpHQD6EDjS0jpBwcACvH4eofvyzBcRDw== dependencies: minimatch "^3.0.4" ee-first@1.1.1: version "1.1.1" - resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" + resolved "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz" integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== electron-to-chromium@^1.4.251: version "1.4.264" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.264.tgz#2f68a062c38b7a04bf57f3e6954b868672fbdcd3" + resolved "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.264.tgz" integrity sha512-AZ6ZRkucHOQT8wke50MktxtmcWZr67kE17X/nAXFf62NIdMdgY6xfsaJD5Szoy84lnkuPWH+4tTNE3s2+bPCiw== +electron-to-chromium@^1.4.601: + version "1.4.608" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.608.tgz#ff567c51dde4892ae330860c7d9f19571e9e1d69" + integrity "sha1-/1Z8Ud3kiSrjMIYMfZ8ZVx6eHWk= sha512-J2f/3iIIm3Mo0npneITZ2UPe4B1bg8fTNrFjD8715F/k1BvbviRuqYGkET1PgprrczXYTHFvotbBOmUp6KE0uA==" + emoji-regex@^8.0.0: version "8.0.0" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" + resolved "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz" integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== encodeurl@~1.0.2: version "1.0.2" - resolved 
"https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" + resolved "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz" integrity sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w== es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19.2, es-abstract@^1.19.5: version "1.20.3" - resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.20.3.tgz#90b143ff7aedc8b3d189bcfac7f1e3e3f81e9da1" + resolved "https://registry.npmjs.org/es-abstract/-/es-abstract-1.20.3.tgz" integrity sha512-AyrnaKVpMzljIdwjzrj+LxGmj8ik2LckwXacHqrJJ/jxz6dDDBcZ7I7nlHM0FvEW8MfbWJwOd+yT2XzYW49Frw== dependencies: call-bind "^1.0.2" @@ -1845,8 +1916,8 @@ es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19.2, es-abstract@^1.19 es-abstract@^1.20.4: version "1.22.1" - resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.22.1.tgz#8b4e5fc5cefd7f1660f0f8e1a52900dfbc9d9ccc" - integrity sha512-ioRRcXMO6OFyRpyzV3kE1IIBd4WG5/kltnzdxSCqoP8CMGs/Li+M1uF5o7lOkZVFjDs+NLesthnF66Pg/0q0Lw== + resolved "https://registry.npmjs.org/es-abstract/-/es-abstract-1.22.1.tgz" + integrity "sha1-i05fxc79fxZg8PjhpSkA37ydnMw= sha512-ioRRcXMO6OFyRpyzV3kE1IIBd4WG5/kltnzdxSCqoP8CMGs/Li+M1uF5o7lOkZVFjDs+NLesthnF66Pg/0q0Lw==" dependencies: array-buffer-byte-length "^1.0.0" arraybuffer.prototype.slice "^1.0.1" @@ -1890,8 +1961,8 @@ es-abstract@^1.20.4: es-get-iterator@^1.1.3: version "1.1.3" - resolved "https://registry.yarnpkg.com/es-get-iterator/-/es-get-iterator-1.1.3.tgz#3ef87523c5d464d41084b2c3c9c214f1199763d6" - integrity sha512-sPZmqHBe6JIiTfN5q2pEi//TwxmAFHwj/XEuYjTuse78i8KxaqMTTzxPoFKuzRpDpTJ+0NAbpfenkmH2rePtuw== + resolved "https://registry.npmjs.org/es-get-iterator/-/es-get-iterator-1.1.3.tgz" + integrity "sha1-Pvh1I8XUZNQQhLLDycIU8RmXY9Y= sha512-sPZmqHBe6JIiTfN5q2pEi//TwxmAFHwj/XEuYjTuse78i8KxaqMTTzxPoFKuzRpDpTJ+0NAbpfenkmH2rePtuw==" dependencies: call-bind "^1.0.2" get-intrinsic "^1.1.3" @@ -1905,8 +1976,8 @@ es-get-iterator@^1.1.3: es-set-tostringtag@^2.0.1: version "2.0.1" - resolved "https://registry.yarnpkg.com/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz#338d502f6f674301d710b80c8592de8a15f09cd8" - integrity sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg== + resolved "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.1.tgz" + integrity "sha1-M41QL29nQwHXELgMhZLeihXwnNg= sha512-g3OMbtlwY3QewlqAiMLI47KywjWZoEytKr8pf6iTC8uJq5bIAH52Z9pnQ8pVL6whrCto53JZDuUIsifGeLorTg==" dependencies: get-intrinsic "^1.1.3" has "^1.0.3" @@ -1914,14 +1985,14 @@ es-set-tostringtag@^2.0.1: es-shim-unscopables@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz#702e632193201e3edf8713635d083d378e510241" + resolved "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz" integrity sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w== dependencies: has "^1.0.3" es-to-primitive@^1.2.1: version "1.2.1" - resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" + resolved "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz" integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== dependencies: is-callable "^1.1.4" @@ -1930,12 +2001,12 @@ es-to-primitive@^1.2.1: es6-error@^4.0.1: version "4.1.1" - resolved 
"https://registry.yarnpkg.com/es6-error/-/es6-error-4.1.1.tgz#9e3af407459deed47e9a91f9b885a84eb05c561d" + resolved "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz" integrity sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg== esbuild@0.16.12: version "0.16.12" - resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.16.12.tgz#60850b9ad2f103f1c4316be42c34d5023f27378d" + resolved "https://registry.npmjs.org/esbuild/-/esbuild-0.16.12.tgz" integrity sha512-eq5KcuXajf2OmivCl4e89AD3j8fbV+UTE9vczEzq5haA07U9oOTzBWlh3+6ZdjJR7Rz2QfWZ2uxZyhZxBgJ4+g== optionalDependencies: "@esbuild/android-arm" "0.16.12" @@ -1963,37 +2034,37 @@ esbuild@0.16.12: escalade@^3.1.1: version "3.1.1" - resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" + resolved "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz" integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== escape-html@~1.0.3: version "1.0.3" - resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" + resolved "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz" integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== escape-string-regexp@4.0.0, escape-string-regexp@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" + resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz" integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== escape-string-regexp@^1.0.5: version "1.0.5" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" + resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== escape-string-regexp@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" + resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz" integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== eslint-config-standard@^11.0.0-beta.0: version "11.0.0" - resolved "https://registry.yarnpkg.com/eslint-config-standard/-/eslint-config-standard-11.0.0.tgz#87ee0d3c9d95382dc761958cbb23da9eea31e0ba" + resolved "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-11.0.0.tgz" integrity sha512-oDdENzpViEe5fwuRCWla7AXQd++/oyIp8zP+iP9jiUPG6NBj3SHgdgtl/kTn00AjeN+1HNvavTKmYbMo+xMOlw== eslint-import-resolver-node@^0.3.6: version "0.3.6" - resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz#4048b958395da89668252001dbd9eca6b83bacbd" + resolved "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.6.tgz" integrity sha512-0En0w03NRVMn9Uiyn8YRPDKvWjxCWkslUEhGNTdGx15RvPJYQ+lbOlqrlNI2vEAs4pDYK4f/HN2TbDmk5TP0iw== dependencies: debug "^3.2.7" @@ -2001,14 +2072,14 @@ eslint-import-resolver-node@^0.3.6: eslint-module-utils@^2.7.3: version "2.7.4" - resolved 
"https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.7.4.tgz#4f3e41116aaf13a20792261e61d3a2e7e0583974" + resolved "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.7.4.tgz" integrity sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA== dependencies: debug "^3.2.7" eslint-plugin-es@^4.1.0: version "4.1.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-es/-/eslint-plugin-es-4.1.0.tgz#f0822f0c18a535a97c3e714e89f88586a7641ec9" + resolved "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-4.1.0.tgz" integrity sha512-GILhQTnjYE2WorX5Jyi5i4dz5ALWxBIdQECVQavL6s7cI76IZTDWleTHkxz/QT3kvcs2QlGHvKLYsSlPOlPXnQ== dependencies: eslint-utils "^2.0.0" @@ -2016,7 +2087,7 @@ eslint-plugin-es@^4.1.0: eslint-plugin-import@^2.8.0: version "2.26.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz#f812dc47be4f2b72b478a021605a59fc6fe8b88b" + resolved "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.26.0.tgz" integrity sha512-hYfi3FXaM8WPLf4S1cikh/r4IxnO6zrhZbEGz2b660EJRbuxgpDS5gkCuYgGWg2xxh2rBuIr4Pvhve/7c31koA== dependencies: array-includes "^3.1.4" @@ -2035,7 +2106,7 @@ eslint-plugin-import@^2.8.0: eslint-plugin-mocha@^10.1.0: version "10.1.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-mocha/-/eslint-plugin-mocha-10.1.0.tgz#69325414f875be87fb2cb00b2ef33168d4eb7c8d" + resolved "https://registry.npmjs.org/eslint-plugin-mocha/-/eslint-plugin-mocha-10.1.0.tgz" integrity sha512-xLqqWUF17llsogVOC+8C6/jvQ+4IoOREbN7ZCHuOHuD6cT5cDD4h7f2LgsZuzMAiwswWE21tO7ExaknHVDrSkw== dependencies: eslint-utils "^3.0.0" @@ -2043,7 +2114,7 @@ eslint-plugin-mocha@^10.1.0: eslint-plugin-n@^15.7.0: version "15.7.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-n/-/eslint-plugin-n-15.7.0.tgz#e29221d8f5174f84d18f2eb94765f2eeea033b90" + resolved "https://registry.npmjs.org/eslint-plugin-n/-/eslint-plugin-n-15.7.0.tgz" integrity sha512-jDex9s7D/Qial8AGVIHq4W7NswpUD5DPDL2RH8Lzd9EloWUuvUkHfv4FRLMipH5q2UtyurorBkPeNi1wVWNh3Q== dependencies: builtins "^5.0.1" @@ -2057,7 +2128,7 @@ eslint-plugin-n@^15.7.0: eslint-plugin-node@^5.2.1: version "5.2.1" - resolved "https://registry.yarnpkg.com/eslint-plugin-node/-/eslint-plugin-node-5.2.1.tgz#80df3253c4d7901045ec87fa660a284e32bdca29" + resolved "https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-5.2.1.tgz" integrity sha512-xhPXrh0Vl/b7870uEbaumb2Q+LxaEcOQ3kS1jtIXanBAwpMre1l5q/l2l/hESYJGEFKuI78bp6Uw50hlpr7B+g== dependencies: ignore "^3.3.6" @@ -2067,17 +2138,17 @@ eslint-plugin-node@^5.2.1: eslint-plugin-promise@^3.6.0: version "3.8.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-promise/-/eslint-plugin-promise-3.8.0.tgz#65ebf27a845e3c1e9d6f6a5622ddd3801694b621" + resolved "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-3.8.0.tgz" integrity sha512-JiFL9UFR15NKpHyGii1ZcvmtIqa3UTwiDAGb8atSffe43qJ3+1czVGN6UtkklpcJ2DVnqvTMzEKRaJdBkAL2aQ== eslint-plugin-standard@^3.0.1: version "3.1.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-standard/-/eslint-plugin-standard-3.1.0.tgz#2a9e21259ba4c47c02d53b2d0c9135d4b1022d47" + resolved "https://registry.npmjs.org/eslint-plugin-standard/-/eslint-plugin-standard-3.1.0.tgz" integrity sha512-fVcdyuKRr0EZ4fjWl3c+gp1BANFJD1+RaWa2UPYfMZ6jCtp5RG00kSaXnK/dE5sYzt4kaWJ9qdxqUfc0d9kX0w== eslint-scope@^7.1.1: version "7.1.1" - resolved 
"https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.1.1.tgz#fff34894c2f65e5226d3041ac480b4513a163642" + resolved "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.1.1.tgz" integrity sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw== dependencies: esrecurse "^4.3.0" @@ -2085,36 +2156,36 @@ eslint-scope@^7.1.1: eslint-utils@^2.0.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-2.1.0.tgz#d2de5e03424e707dc10c74068ddedae708741b27" + resolved "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz" integrity sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg== dependencies: eslint-visitor-keys "^1.1.0" eslint-utils@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-3.0.0.tgz#8aebaface7345bb33559db0a1f13a1d2d48c3672" + resolved "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz" integrity sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA== dependencies: eslint-visitor-keys "^2.0.0" eslint-visitor-keys@^1.1.0: version "1.3.0" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz#30ebd1ef7c2fdff01c3a4f151044af25fab0523e" + resolved "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz" integrity sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ== eslint-visitor-keys@^2.0.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303" + resolved "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz" integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw== eslint-visitor-keys@^3.3.0: version "3.3.0" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz#f6480fa6b1f30efe2d1968aa8ac745b862469826" + resolved "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz" integrity sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA== eslint@^8.23.0: version "8.24.0" - resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.24.0.tgz#489516c927a5da11b3979dbfb2679394523383c8" + resolved "https://registry.npmjs.org/eslint/-/eslint-8.24.0.tgz" integrity sha512-dWFaPhGhTAiPcCgm3f6LI2MBWbogMnTJzFBbhXVRQDJPkr9pGZvVjlVfXd+vyDcWPA2Ic9L2AXPIQM0+vk/cSQ== dependencies: "@eslint/eslintrc" "^1.3.2" @@ -2159,12 +2230,12 @@ eslint@^8.23.0: esm@^3.2.25: version "3.2.25" - resolved "https://registry.yarnpkg.com/esm/-/esm-3.2.25.tgz#342c18c29d56157688ba5ce31f8431fbb795cc10" + resolved "https://registry.npmjs.org/esm/-/esm-3.2.25.tgz" integrity sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA== espree@^9.4.0: version "9.4.0" - resolved "https://registry.yarnpkg.com/espree/-/espree-9.4.0.tgz#cd4bc3d6e9336c433265fc0aa016fc1aaf182f8a" + resolved "https://registry.npmjs.org/espree/-/espree-9.4.0.tgz" integrity sha512-DQmnRpLj7f6TgN/NYb0MTzJXL+vJF9h3pHy4JhCIs3zwcgez8xmGg3sXHcEO97BrmO2OSvCwMdfdlyl+E9KjOw== dependencies: acorn "^8.8.0" @@ -2173,57 +2244,57 @@ espree@^9.4.0: esprima@^4.0.0, esprima@~4.0.0: version "4.0.1" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" + resolved 
"https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz" integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== esquery@^1.4.0: version "1.4.0" - resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.4.0.tgz#2148ffc38b82e8c7057dfed48425b3e61f0f24a5" + resolved "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz" integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w== dependencies: estraverse "^5.1.0" esrecurse@^4.3.0: version "4.3.0" - resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" + resolved "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz" integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== dependencies: estraverse "^5.2.0" estraverse@^5.1.0, estraverse@^5.2.0: version "5.3.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" + resolved "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz" integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== esutils@^2.0.2: version "2.0.3" - resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" + resolved "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== etag@~1.8.1: version "1.8.1" - resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" + resolved "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz" integrity sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg== event-lite@^0.1.1: version "0.1.2" - resolved "https://registry.yarnpkg.com/event-lite/-/event-lite-0.1.2.tgz#838a3e0fdddef8cc90f128006c8e55a4e4e4c11b" + resolved "https://registry.npmjs.org/event-lite/-/event-lite-0.1.2.tgz" integrity sha512-HnSYx1BsJ87/p6swwzv+2v6B4X+uxUteoDfRxsAb1S1BePzQqOLevVmkdA15GHJVd9A9Ok6wygUR18Hu0YeV9g== events-to-array@^1.0.1: version "1.1.2" - resolved "https://registry.yarnpkg.com/events-to-array/-/events-to-array-1.1.2.tgz#2d41f563e1fe400ed4962fe1a4d5c6a7539df7f6" + resolved "https://registry.npmjs.org/events-to-array/-/events-to-array-1.1.2.tgz" integrity sha512-inRWzRY7nG+aXZxBzEqYKB3HPgwflZRopAjDCHv0whhRx+MTUr1ei0ICZUypdyE0HRm4L2d5VEcIqLD6yl+BFA== events@1.1.1: version "1.1.1" - resolved "https://registry.yarnpkg.com/events/-/events-1.1.1.tgz#9ebdb7635ad099c70dcc4c2a1f5004288e8bd924" - integrity sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw== + resolved "https://registry.npmjs.org/events/-/events-1.1.1.tgz" + integrity "sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ= sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw==" express@^4.18.2: version "4.18.2" - resolved "https://registry.yarnpkg.com/express/-/express-4.18.2.tgz#3fabe08296e930c796c19e3c516979386ba9fd59" - integrity sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ== + resolved "https://registry.npmjs.org/express/-/express-4.18.2.tgz" + integrity "sha1-P6vggpbpMMeWwZ48UWl5OGup/Vk= sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==" dependencies: accepts "~1.3.8" array-flatten "1.1.1" @@ -2259,12 +2330,12 @@ express@^4.18.2: 
fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: version "3.1.3" - resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" + resolved "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz" integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== fast-glob@^3.2.9: version "3.2.12" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" + resolved "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.12.tgz" integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== dependencies: "@nodelib/fs.stat" "^2.0.2" @@ -2275,31 +2346,31 @@ fast-glob@^3.2.9: fast-json-stable-stringify@^2.0.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + resolved "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz" integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== fast-levenshtein@^2.0.6: version "2.0.6" - resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" + resolved "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz" integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== fastq@^1.6.0: version "1.13.0" - resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.13.0.tgz#616760f88a7526bdfc596b7cab8c18938c36b98c" + resolved "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz" integrity sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw== dependencies: reusify "^1.0.4" file-entry-cache@^6.0.1: version "6.0.1" - resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" + resolved "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz" integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== dependencies: flat-cache "^3.0.4" fill-keys@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/fill-keys/-/fill-keys-1.0.2.tgz#9a8fa36f4e8ad634e3bf6b4f3c8882551452eb20" + resolved "https://registry.npmjs.org/fill-keys/-/fill-keys-1.0.2.tgz" integrity sha512-tcgI872xXjwFF4xgQmLxi76GnwJG3g/3isB1l4/G5Z4zrbddGpBjqZCO9oEAcB5wX0Hj/5iQB3toxfO7in1hHA== dependencies: is-object "~1.0.1" @@ -2307,14 +2378,14 @@ fill-keys@^1.0.2: fill-range@^7.0.1: version "7.0.1" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" + resolved "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz" integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== dependencies: to-regex-range "^5.0.1" finalhandler@1.2.0: version "1.2.0" - resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32" + resolved "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz" integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== dependencies: debug "2.6.9" @@ -2327,7 +2398,7 @@ finalhandler@1.2.0: find-cache-dir@^3.2.0: version "3.3.2" - resolved 
"https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-3.3.2.tgz#b30c5b6eff0730731aea9bbd9dbecbd80256d64b" + resolved "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz" integrity sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig== dependencies: commondir "^1.0.1" @@ -2336,7 +2407,7 @@ find-cache-dir@^3.2.0: find-up@5.0.0, find-up@^5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" + resolved "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz" integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== dependencies: locate-path "^6.0.0" @@ -2344,7 +2415,7 @@ find-up@5.0.0, find-up@^5.0.0: find-up@^4.0.0, find-up@^4.1.0: version "4.1.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" + resolved "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz" integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== dependencies: locate-path "^5.0.0" @@ -2352,12 +2423,12 @@ find-up@^4.0.0, find-up@^4.1.0: findit@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/findit/-/findit-2.0.0.tgz#6509f0126af4c178551cfa99394e032e13a4d56e" + resolved "https://registry.npmjs.org/findit/-/findit-2.0.0.tgz" integrity sha512-ENZS237/Hr8bjczn5eKuBohLgaD0JyUd0arxretR1f9RO46vZHA1b2y0VorgGV3WaOT3c+78P8h7v4JGJ1i/rg== flat-cache@^3.0.4: version "3.0.4" - resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" + resolved "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz" integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== dependencies: flatted "^3.1.0" @@ -2365,29 +2436,29 @@ flat-cache@^3.0.4: flat@^5.0.2: version "5.0.2" - resolved "https://registry.yarnpkg.com/flat/-/flat-5.0.2.tgz#8ca6fe332069ffa9d324c327198c598259ceb241" + resolved "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz" integrity sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ== flatted@^3.1.0: version "3.2.7" - resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787" + resolved "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz" integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ== follow-redirects@^1.14.0: version "1.15.2" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13" + resolved "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz" integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA== for-each@^0.3.3: version "0.3.3" - resolved "https://registry.yarnpkg.com/for-each/-/for-each-0.3.3.tgz#69b447e88a0a5d32c3e7084f3f1710034b21376e" + resolved "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz" integrity sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw== dependencies: is-callable "^1.1.3" foreground-child@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/foreground-child/-/foreground-child-2.0.0.tgz#71b32800c9f15aa8f2f83f4a6bd9bff35d861a53" + resolved "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz" integrity 
sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA== dependencies: cross-spawn "^7.0.0" @@ -2395,7 +2466,7 @@ foreground-child@^2.0.0: form-data@^2.5.1: version "2.5.1" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.5.1.tgz#f2cbec57b5e59e23716e128fe44d4e5dd23895f4" + resolved "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz" integrity sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA== dependencies: asynckit "^0.4.0" @@ -2404,52 +2475,52 @@ form-data@^2.5.1: forwarded@0.2.0: version "0.2.0" - resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" + resolved "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz" integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== fresh@0.5.2: version "0.5.2" - resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" + resolved "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz" integrity sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q== fromentries@^1.2.0: version "1.3.2" - resolved "https://registry.yarnpkg.com/fromentries/-/fromentries-1.3.2.tgz#e4bca6808816bf8f93b52750f1127f5a6fd86e3a" + resolved "https://registry.npmjs.org/fromentries/-/fromentries-1.3.2.tgz" integrity sha512-cHEpEQHUg0f8XdtZCc2ZAhrHzKzT0MrFUTcvx+hfxYu7rGMDc5SKoXFh+n4YigxsHXRzc6OrCshdR1bWH6HHyg== fs-exists-cached@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/fs-exists-cached/-/fs-exists-cached-1.0.0.tgz#cf25554ca050dc49ae6656b41de42258989dcbce" + resolved "https://registry.npmjs.org/fs-exists-cached/-/fs-exists-cached-1.0.0.tgz" integrity sha512-kSxoARUDn4F2RPXX48UXnaFKwVU7Ivd/6qpzZL29MCDmr9sTvybv4gFCp+qaI4fM9m0z9fgz/yJvi56GAz+BZg== fs.realpath@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" + resolved "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz" integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== fsevents@~2.3.1, fsevents@~2.3.2: version "2.3.2" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" + resolved "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz" integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== function-bind@^1.1.1: version "1.1.1" - resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" + resolved "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz" integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== function-bind@^1.1.2: version "1.1.2" - resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" - integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== + resolved "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz" + integrity "sha1-LALYZNl/PqbIgwxGTL0Rq26rehw= sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==" function-loop@^2.0.1: version "2.0.1" - resolved 
"https://registry.yarnpkg.com/function-loop/-/function-loop-2.0.1.tgz#799c56ced01698cf12a1b80e4802e9dafc2ebada" + resolved "https://registry.npmjs.org/function-loop/-/function-loop-2.0.1.tgz" integrity sha512-ktIR+O6i/4h+j/ZhZJNdzeI4i9lEPeEK6UPR2EVyTVBqOwcU3Za9xYKLH64ZR9HmcROyRrOkizNyjjtWJzDDkQ== function.prototype.name@^1.1.5: version "1.1.5" - resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621" + resolved "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz" integrity sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA== dependencies: call-bind "^1.0.2" @@ -2459,27 +2530,27 @@ function.prototype.name@^1.1.5: functions-have-names@^1.2.2, functions-have-names@^1.2.3: version "1.2.3" - resolved "https://registry.yarnpkg.com/functions-have-names/-/functions-have-names-1.2.3.tgz#0404fe4ee2ba2f607f0e0ec3c80bae994133b834" + resolved "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz" integrity sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ== gensync@^1.0.0-beta.2: version "1.0.0-beta.2" - resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.2.tgz#32a6ee76c3d7f52d46b2b1ae5d93fea8580a25e0" + resolved "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz" integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== get-caller-file@^2.0.1, get-caller-file@^2.0.5: version "2.0.5" - resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" + resolved "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz" integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== get-func-name@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/get-func-name/-/get-func-name-2.0.0.tgz#ead774abee72e20409433a066366023dd6887a41" - integrity sha512-Hm0ixYtaSZ/V7C8FJrtZIuBBI+iSgL+1Aq82zSu8VQNB4S3Gk8e7Qs3VwBDJAhmRZcFqkl3tQu36g/Foh5I5ig== + version "2.0.2" + resolved "https://registry.yarnpkg.com/get-func-name/-/get-func-name-2.0.2.tgz#0d7cf20cd13fda808669ffa88f4ffc7a3943fc41" + integrity "sha1-DXzyDNE/2oCGaf+oj0/8ejlD/EE= sha512-8vXOvuE167CtIc3OyItco7N/dpRtBbYOsPsXCz7X/PMnlGjYjSGuZJgM1Y7mmew7BKf9BqvLX2tnOVy1BBUsxQ==" get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3: version "1.1.3" - resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.3.tgz#063c84329ad93e83893c7f4f243ef63ffa351385" + resolved "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz" integrity sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A== dependencies: function-bind "^1.1.1" @@ -2488,8 +2559,8 @@ get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1, get-intrinsic@ get-intrinsic@^1.2.0, get-intrinsic@^1.2.1: version "1.2.1" - resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.1.tgz#d295644fed4505fc9cde952c37ee12b477a83d82" - integrity sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw== + resolved "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.1.tgz" + integrity "sha1-0pVkT+1FBfyc3pUsN+4StHeoPYI= sha512-2DcsyfABl+gVHEfCOaTrWgyt+tb6MSEGmKq+kI5HwLbIYgjgmMcV8KQ41uaKz1xxUcn9tJtgFbQUEVcEbd0FYw==" 
dependencies: function-bind "^1.1.1" has "^1.0.3" @@ -2498,17 +2569,17 @@ get-intrinsic@^1.2.0, get-intrinsic@^1.2.1: get-package-type@^0.1.0: version "0.1.0" - resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" + resolved "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz" integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== get-port@^3.2.0: version "3.2.0" - resolved "https://registry.yarnpkg.com/get-port/-/get-port-3.2.0.tgz#dd7ce7de187c06c8bf353796ac71e099f0980ebc" + resolved "https://registry.npmjs.org/get-port/-/get-port-3.2.0.tgz" integrity sha512-x5UJKlgeUiNT8nyo/AcnwLnZuZNcSjSw0kogRB+Whd1fjjFq4B1hySFxSFWWSn4mIBzg3sRNUDFYc4g5gjPoLg== get-symbol-description@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" + resolved "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz" integrity sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw== dependencies: call-bind "^1.0.2" @@ -2516,26 +2587,26 @@ get-symbol-description@^1.0.0: getopts@2.3.0: version "2.3.0" - resolved "https://registry.yarnpkg.com/getopts/-/getopts-2.3.0.tgz#71e5593284807e03e2427449d4f6712a268666f4" + resolved "https://registry.npmjs.org/getopts/-/getopts-2.3.0.tgz" integrity sha512-5eDf9fuSXwxBL6q5HX+dhDj+dslFGWzU5thZ9kNKUkcPtaPdatmUFKwHFrLb/uf/WpA4BHET+AX3Scl56cAjpA== glob-parent@^5.1.2, glob-parent@~5.1.0, glob-parent@~5.1.2: version "5.1.2" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" + resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz" integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== dependencies: is-glob "^4.0.1" glob-parent@^6.0.1: version "6.0.2" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" + resolved "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz" integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== dependencies: is-glob "^4.0.3" glob@7.1.6: version "7.1.6" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" + resolved "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz" integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== dependencies: fs.realpath "^1.0.0" @@ -2547,7 +2618,7 @@ glob@7.1.6: glob@^7.0.5, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6, glob@^7.2.3: version "7.2.3" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" + resolved "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz" integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== dependencies: fs.realpath "^1.0.0" @@ -2559,26 +2630,26 @@ glob@^7.0.5, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6, glob@^7.2.3: globals@^11.1.0: version "11.12.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" + resolved "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz" integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== 
globals@^13.15.0: version "13.17.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-13.17.0.tgz#902eb1e680a41da93945adbdcb5a9f361ba69bd4" + resolved "https://registry.npmjs.org/globals/-/globals-13.17.0.tgz" integrity sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw== dependencies: type-fest "^0.20.2" globalthis@^1.0.3: version "1.0.3" - resolved "https://registry.yarnpkg.com/globalthis/-/globalthis-1.0.3.tgz#5852882a52b80dc301b0660273e1ed082f0b6ccf" - integrity sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA== + resolved "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz" + integrity "sha1-WFKIKlK4DcMBsGYCc+HtCC8LbM8= sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==" dependencies: define-properties "^1.1.3" globby@^11.1.0: version "11.1.0" - resolved "https://registry.yarnpkg.com/globby/-/globby-11.1.0.tgz#bd4be98bb042f83d796f7e3811991fbe82a0d34b" + resolved "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz" integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== dependencies: array-union "^2.1.0" @@ -2590,95 +2661,95 @@ globby@^11.1.0: gopd@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c" - integrity sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA== + resolved "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz" + integrity "sha1-Kf923mnax0ibfAkYpXiOVkd8Myw= sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==" dependencies: get-intrinsic "^1.1.3" graceful-fs@^4.1.15: version "4.2.10" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" + resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz" integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== grapheme-splitter@^1.0.4: version "1.0.4" - resolved "https://registry.yarnpkg.com/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz#9cf3a665c6247479896834af35cf1dbb4400767e" + resolved "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz" integrity sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ== graphql@0.13.2: version "0.13.2" - resolved "https://registry.yarnpkg.com/graphql/-/graphql-0.13.2.tgz#4c740ae3c222823e7004096f832e7b93b2108270" + resolved "https://registry.npmjs.org/graphql/-/graphql-0.13.2.tgz" integrity sha512-QZ5BL8ZO/B20VA8APauGBg3GyEgZ19eduvpLWoq5x7gMmWnHoy8rlQWPLmWgFvo1yNgjSEFMesmS4R6pPr7xog== dependencies: iterall "^1.2.1" growl@1.10.5: version "1.10.5" - resolved "https://registry.yarnpkg.com/growl/-/growl-1.10.5.tgz#f2735dc2283674fa67478b10181059355c369e5e" + resolved "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz" integrity sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA== has-async-hooks@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/has-async-hooks/-/has-async-hooks-1.0.0.tgz#3df965ade8cd2d9dbfdacfbca3e0a5152baaf204" + resolved "https://registry.npmjs.org/has-async-hooks/-/has-async-hooks-1.0.0.tgz" integrity sha512-YF0VPGjkxr7AyyQQNykX8zK4PvtEDsUJAPqwu06UFz1lb6EvI53sPh5H1kWxg8NXI5LsfRCZ8uX9NkYDZBb/mw== has-bigints@^1.0.1, has-bigints@^1.0.2: version "1.0.2" - resolved 
"https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" + resolved "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz" integrity sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ== has-dynamic-import@^2.0.1: version "2.0.1" - resolved "https://registry.yarnpkg.com/has-dynamic-import/-/has-dynamic-import-2.0.1.tgz#9bca87846aa264f2ad224fcd014946f5e5182f52" - integrity sha512-X3fbtsZmwb6W7fJGR9o7x65fZoodygCrZ3TVycvghP62yYQfS0t4RS0Qcz+j5tQYUKeSWS09tHkWW6WhFV3XhQ== + resolved "https://registry.npmjs.org/has-dynamic-import/-/has-dynamic-import-2.0.1.tgz" + integrity "sha1-m8qHhGqiZPKtIk/NAUlG9eUYL1I= sha512-X3fbtsZmwb6W7fJGR9o7x65fZoodygCrZ3TVycvghP62yYQfS0t4RS0Qcz+j5tQYUKeSWS09tHkWW6WhFV3XhQ==" dependencies: call-bind "^1.0.2" get-intrinsic "^1.1.1" has-flag@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" + resolved "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz" integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== has-flag@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" + resolved "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz" integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== has-property-descriptors@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz#610708600606d36961ed04c196193b6a607fa861" + resolved "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz" integrity sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ== dependencies: get-intrinsic "^1.1.1" has-proto@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.1.tgz#1885c1305538958aff469fef37937c22795408e0" - integrity sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg== + resolved "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz" + integrity "sha1-GIXBMFU4lYr/Rp/vN5N8InlUCOA= sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==" has-symbols@^1.0.2, has-symbols@^1.0.3: version "1.0.3" - resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" + resolved "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz" integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== has-tostringtag@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25" + resolved "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz" integrity sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ== dependencies: has-symbols "^1.0.2" has@^1.0.3: version "1.0.3" - resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" + resolved "https://registry.npmjs.org/has/-/has-1.0.3.tgz" integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== dependencies: function-bind "^1.1.1" hasha@^5.0.0: version "5.2.2" - resolved 
"https://registry.yarnpkg.com/hasha/-/hasha-5.2.2.tgz#a48477989b3b327aea3c04f53096d816d97522a1" + resolved "https://registry.npmjs.org/hasha/-/hasha-5.2.2.tgz" integrity sha512-Hrp5vIK/xr5SkeN2onO32H0MgNZ0f17HRNH39WfL0SYUNOTZ5Lz1TJ8Pajo/87dYGEFlLMm7mIc/k/s6Bvz9HQ== dependencies: is-stream "^2.0.0" @@ -2686,7 +2757,7 @@ hasha@^5.0.0: hdr-histogram-js@^1.0.0, hdr-histogram-js@^1.1.4: version "1.2.0" - resolved "https://registry.yarnpkg.com/hdr-histogram-js/-/hdr-histogram-js-1.2.0.tgz#1213c0b317f39b9c05bc4f208cb7931dbbc192ae" + resolved "https://registry.npmjs.org/hdr-histogram-js/-/hdr-histogram-js-1.2.0.tgz" integrity sha512-h0YToJ3ewqsaZ3nFTTa6dLOD7sqx+EgdC4+OcJ9Ou7zZDlT0sXSPHHr3cyenQsPqqbVHGn/oFY6zjfEKXGvzmQ== dependencies: base64-js "^1.2.0" @@ -2694,24 +2765,24 @@ hdr-histogram-js@^1.0.0, hdr-histogram-js@^1.1.4: hdr-histogram-percentiles-obj@^2.0.0: version "2.0.1" - resolved "https://registry.yarnpkg.com/hdr-histogram-percentiles-obj/-/hdr-histogram-percentiles-obj-2.0.1.tgz#7a4d52fa02087118c66469e6b66b74f9fbb44d82" + resolved "https://registry.npmjs.org/hdr-histogram-percentiles-obj/-/hdr-histogram-percentiles-obj-2.0.1.tgz" integrity sha512-QBvbTxPlGwHj36IRF16XLoYEbUv5YEyO385kiS0IS3831fcSTNXTR785VtFFZ2ahY733z0ky8Jv4d6In+Ss+wQ== dependencies: hdr-histogram-js "^1.0.0" he@1.2.0: version "1.2.0" - resolved "https://registry.yarnpkg.com/he/-/he-1.2.0.tgz#84ae65fa7eafb165fddb61566ae14baf05664f0f" + resolved "https://registry.npmjs.org/he/-/he-1.2.0.tgz" integrity sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw== html-escaper@^2.0.0: version "2.0.2" - resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" + resolved "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz" integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== http-errors@2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" + resolved "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz" integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== dependencies: depd "2.0.0" @@ -2722,12 +2793,12 @@ http-errors@2.0.0: http-parser-js@^0.5.2: version "0.5.8" - resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.5.8.tgz#af23090d9ac4e24573de6f6aecc9d84a48bf20e3" + resolved "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.8.tgz" integrity sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q== hyperid@^2.0.3: version "2.3.1" - resolved "https://registry.yarnpkg.com/hyperid/-/hyperid-2.3.1.tgz#70cc2c917b6367c9f7307718be243bc28b258353" + resolved "https://registry.npmjs.org/hyperid/-/hyperid-2.3.1.tgz" integrity sha512-mIbI7Ymn6MCdODaW1/6wdf5lvvXzmPsARN4zTLakMmcziBOuP4PxCBJvHF6kbAIHX6H4vAELx/pDmt0j6Th5RQ== dependencies: uuid "^8.3.2" @@ -2735,44 +2806,44 @@ hyperid@^2.0.3: iconv-lite@0.4.24: version "0.4.24" - resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" + resolved "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz" integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== dependencies: safer-buffer ">= 2.1.2 < 3" ieee754@1.1.13: version "1.1.13" - resolved 
"https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84" - integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg== + resolved "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz" + integrity "sha1-7BaFWOlaoYH9h9N/VcMrvLZwi4Q= sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==" ieee754@^1.1.4, ieee754@^1.1.8: version "1.2.1" - resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" + resolved "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz" integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== ignore@^3.3.6: version "3.3.10" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.3.10.tgz#0a97fb876986e8081c631160f8f9f389157f0043" + resolved "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz" integrity sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug== ignore@^5.1.1, ignore@^5.2.4: version "5.2.4" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324" + resolved "https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz" integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ== ignore@^5.2.0: version "5.2.0" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.0.tgz#6d3bac8fa7fe0d45d9f9be7bac2fc279577e345a" + resolved "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz" integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ== immediate@~3.0.5: version "3.0.6" - resolved "https://registry.yarnpkg.com/immediate/-/immediate-3.0.6.tgz#9db1dbd0faf8de6fbe0f5dd5e56bb606280de69b" + resolved "https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz" integrity sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ== import-fresh@^3.0.0, import-fresh@^3.2.1: version "3.3.0" - resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" + resolved "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz" integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== dependencies: parent-module "^1.0.0" @@ -2780,8 +2851,8 @@ import-fresh@^3.0.0, import-fresh@^3.2.1: import-in-the-middle@^1.4.2: version "1.4.2" - resolved "https://registry.yarnpkg.com/import-in-the-middle/-/import-in-the-middle-1.4.2.tgz#2a266676e3495e72c04bbaa5ec14756ba168391b" - integrity sha512-9WOz1Yh/cvO/p69sxRmhyQwrIGGSp7EIdcb+fFNVi7CzQGQB8U1/1XrKVSbEd/GNOAeM0peJtmi7+qphe7NvAw== + resolved "https://registry.npmjs.org/import-in-the-middle/-/import-in-the-middle-1.4.2.tgz" + integrity "sha1-KiZmduNJXnLAS7ql7BR1a6FoORs= sha512-9WOz1Yh/cvO/p69sxRmhyQwrIGGSp7EIdcb+fFNVi7CzQGQB8U1/1XrKVSbEd/GNOAeM0peJtmi7+qphe7NvAw==" dependencies: acorn "^8.8.2" acorn-import-assertions "^1.9.0" @@ -2790,17 +2861,17 @@ import-in-the-middle@^1.4.2: imurmurhash@^0.1.4: version "0.1.4" - resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" + resolved "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz" integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA== indent-string@^4.0.0: version "4.0.0" - resolved 
"https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251" + resolved "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz" integrity sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== inflight@^1.0.4: version "1.0.6" - resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" + resolved "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz" integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA== dependencies: once "^1.3.0" @@ -2808,12 +2879,12 @@ inflight@^1.0.4: inherits@2, inherits@2.0.4, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.3: version "2.0.4" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" + resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== ink@^3.2.0: version "3.2.0" - resolved "https://registry.yarnpkg.com/ink/-/ink-3.2.0.tgz#434793630dc57d611c8fe8fffa1db6b56f1a16bb" + resolved "https://registry.npmjs.org/ink/-/ink-3.2.0.tgz" integrity sha512-firNp1q3xxTzoItj/eOOSZQnYSlyrWks5llCTVX37nJ59K3eXbQ8PtzCguqo8YI19EELo5QxaKnJd4VxzhU8tg== dependencies: ansi-escapes "^4.2.1" @@ -2842,12 +2913,12 @@ ink@^3.2.0: int64-buffer@^0.1.9: version "0.1.10" - resolved "https://registry.yarnpkg.com/int64-buffer/-/int64-buffer-0.1.10.tgz#277b228a87d95ad777d07c13832022406a473423" + resolved "https://registry.npmjs.org/int64-buffer/-/int64-buffer-0.1.10.tgz" integrity sha512-v7cSY1J8ydZ0GyjUHqF+1bshJ6cnEVLo9EnjB8p+4HDRPZc9N5jjmvUV7NvEsqQOKyH0pmIBFWXVQbiS0+OBbA== internal-slot@^1.0.3: version "1.0.3" - resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.3.tgz#7347e307deeea2faac2ac6205d4bc7d34967f59c" + resolved "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.3.tgz" integrity sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA== dependencies: get-intrinsic "^1.1.0" @@ -2856,8 +2927,8 @@ internal-slot@^1.0.3: internal-slot@^1.0.4, internal-slot@^1.0.5: version "1.0.5" - resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.5.tgz#f2a2ee21f668f8627a4667f309dc0f4fb6674986" - integrity sha512-Y+R5hJrzs52QCG2laLn4udYVnxsfny9CpOhNhUvk/SSSVyF6T27FzRbF0sroPidSu3X8oEAkOn2K804mjpt6UQ== + resolved "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.5.tgz" + integrity "sha1-8qLuIfZo+GJ6RmfzCdwPT7ZnSYY= sha512-Y+R5hJrzs52QCG2laLn4udYVnxsfny9CpOhNhUvk/SSSVyF6T27FzRbF0sroPidSu3X8oEAkOn2K804mjpt6UQ==" dependencies: get-intrinsic "^1.2.0" has "^1.0.3" @@ -2865,22 +2936,22 @@ internal-slot@^1.0.4, internal-slot@^1.0.5: interpret@^2.2.0: version "2.2.0" - resolved "https://registry.yarnpkg.com/interpret/-/interpret-2.2.0.tgz#1a78a0b5965c40a5416d007ad6f50ad27c417df9" + resolved "https://registry.npmjs.org/interpret/-/interpret-2.2.0.tgz" integrity sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw== ipaddr.js@1.9.1: version "1.9.1" - resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" + resolved "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz" integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== ipaddr.js@^2.1.0: version "2.1.0" - resolved 
"https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-2.1.0.tgz#2119bc447ff8c257753b196fc5f1ce08a4cdf39f" + resolved "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.1.0.tgz" integrity sha512-LlbxQ7xKzfBusov6UMi4MFpEg0m+mAm9xyNGEduwXMEDuf4WfzB/RZwMVYEd7IKGvh4IUkEXYxtAVu9T3OelJQ== is-arguments@^1.0.4, is-arguments@^1.1.1: version "1.1.1" - resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.1.1.tgz#15b3f88fda01f2a97fec84ca761a560f123efa9b" + resolved "https://registry.npmjs.org/is-arguments/-/is-arguments-1.1.1.tgz" integrity sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA== dependencies: call-bind "^1.0.2" @@ -2888,8 +2959,8 @@ is-arguments@^1.0.4, is-arguments@^1.1.1: is-array-buffer@^3.0.1, is-array-buffer@^3.0.2: version "3.0.2" - resolved "https://registry.yarnpkg.com/is-array-buffer/-/is-array-buffer-3.0.2.tgz#f2653ced8412081638ecb0ebbd0c41c6e0aecbbe" - integrity sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w== + resolved "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.2.tgz" + integrity "sha1-8mU87YQSCBY47LDrvQxBxuCuy74= sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w==" dependencies: call-bind "^1.0.2" get-intrinsic "^1.2.0" @@ -2897,21 +2968,21 @@ is-array-buffer@^3.0.1, is-array-buffer@^3.0.2: is-bigint@^1.0.1: version "1.0.4" - resolved "https://registry.yarnpkg.com/is-bigint/-/is-bigint-1.0.4.tgz#08147a1875bc2b32005d41ccd8291dffc6691df3" + resolved "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz" integrity sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg== dependencies: has-bigints "^1.0.1" is-binary-path@~2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" + resolved "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz" integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== dependencies: binary-extensions "^2.0.0" is-boolean-object@^1.1.0: version "1.1.2" - resolved "https://registry.yarnpkg.com/is-boolean-object/-/is-boolean-object-1.1.2.tgz#5c6dc200246dd9321ae4b885a114bb1f75f63719" + resolved "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz" integrity sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA== dependencies: call-bind "^1.0.2" @@ -2919,101 +2990,101 @@ is-boolean-object@^1.1.0: is-callable@^1.1.3, is-callable@^1.1.4, is-callable@^1.2.6, is-callable@^1.2.7: version "1.2.7" - resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.7.tgz#3bc2a85ea742d9e36205dcacdd72ca1fdc51b055" + resolved "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz" integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA== is-ci@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-2.0.0.tgz#6bc6334181810e04b5c22b3d589fdca55026404c" + resolved "https://registry.npmjs.org/is-ci/-/is-ci-2.0.0.tgz" integrity sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w== dependencies: ci-info "^2.0.0" is-core-module@^2.11.0: version "2.12.0" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.12.0.tgz#36ad62f6f73c8253fd6472517a12483cf03e7ec4" + resolved 
"https://registry.npmjs.org/is-core-module/-/is-core-module-2.12.0.tgz" integrity sha512-RECHCBCd/viahWmwj6enj19sKbHfJrddi/6cBDsNTKbNq0f7VeaUkBo60BqzvPqo/W54ChS62Z5qyun7cfOMqQ== dependencies: has "^1.0.3" is-core-module@^2.8.1, is-core-module@^2.9.0: version "2.10.0" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.10.0.tgz#9012ede0a91c69587e647514e1d5277019e728ed" + resolved "https://registry.npmjs.org/is-core-module/-/is-core-module-2.10.0.tgz" integrity sha512-Erxj2n/LDAZ7H8WNJXd9tw38GYM3dv8rk8Zcs+jJuxYTW7sozH+SS8NtrSjVL1/vpLvWi1hxy96IzjJ3EHTJJg== dependencies: has "^1.0.3" is-date-object@^1.0.1, is-date-object@^1.0.5: version "1.0.5" - resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.5.tgz#0841d5536e724c25597bf6ea62e1bd38298df31f" + resolved "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz" integrity sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ== dependencies: has-tostringtag "^1.0.0" is-extglob@^2.1.1: version "2.1.1" - resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" + resolved "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz" integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ== is-fullwidth-code-point@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" + resolved "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz" integrity sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w== is-fullwidth-code-point@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" + resolved "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz" integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== is-generator-function@^1.0.7: version "1.0.10" - resolved "https://registry.yarnpkg.com/is-generator-function/-/is-generator-function-1.0.10.tgz#f1558baf1ac17e0deea7c0415c438351ff2b3c72" - integrity sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A== + resolved "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.0.10.tgz" + integrity "sha1-8VWLrxrBfg3up8BBXEODUf8rPHI= sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==" dependencies: has-tostringtag "^1.0.0" is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: version "4.0.3" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + resolved "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz" integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== dependencies: is-extglob "^2.1.1" is-map@^2.0.1, is-map@^2.0.2: version "2.0.2" - resolved "https://registry.yarnpkg.com/is-map/-/is-map-2.0.2.tgz#00922db8c9bf73e81b7a335827bc2a43f2b91127" - integrity sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg== + resolved "https://registry.npmjs.org/is-map/-/is-map-2.0.2.tgz" + integrity "sha1-AJItuMm/c+gbejNYJ7wqQ/K5ESc= 
sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg==" is-negative-zero@^2.0.2: version "2.0.2" - resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.2.tgz#7bf6f03a28003b8b3965de3ac26f664d765f3150" + resolved "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz" integrity sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA== is-number-object@^1.0.4: version "1.0.7" - resolved "https://registry.yarnpkg.com/is-number-object/-/is-number-object-1.0.7.tgz#59d50ada4c45251784e9904f5246c742f07a42fc" + resolved "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz" integrity sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ== dependencies: has-tostringtag "^1.0.0" is-number@^7.0.0: version "7.0.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" + resolved "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz" integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== is-object@~1.0.1: version "1.0.2" - resolved "https://registry.yarnpkg.com/is-object/-/is-object-1.0.2.tgz#a56552e1c665c9e950b4a025461da87e72f86fcf" + resolved "https://registry.npmjs.org/is-object/-/is-object-1.0.2.tgz" integrity sha512-2rRIahhZr2UWb45fIOuvZGpFtz0TyOZLf32KxBbSoUCeZR495zCKlWUKKUByk3geS2eAs7ZAABt0Y/Rx0GiQGA== is-plain-obj@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-2.1.0.tgz#45e42e37fccf1f40da8e5f76ee21515840c09287" + resolved "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz" integrity sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA== is-regex@^1.1.4: version "1.1.4" - resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" + resolved "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz" integrity sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg== dependencies: call-bind "^1.0.2" @@ -3021,39 +3092,39 @@ is-regex@^1.1.4: is-set@^2.0.1, is-set@^2.0.2: version "2.0.2" - resolved "https://registry.yarnpkg.com/is-set/-/is-set-2.0.2.tgz#90755fa4c2562dc1c5d4024760d6119b94ca18ec" - integrity sha512-+2cnTEZeY5z/iXGbLhPrOAaK/Mau5k5eXq9j14CpRTftq0pAJu2MwVRSZhyZWBzx3o6X795Lz6Bpb6R0GKf37g== + resolved "https://registry.npmjs.org/is-set/-/is-set-2.0.2.tgz" + integrity "sha1-kHVfpMJWLcHF1AJHYNYRm5TKGOw= sha512-+2cnTEZeY5z/iXGbLhPrOAaK/Mau5k5eXq9j14CpRTftq0pAJu2MwVRSZhyZWBzx3o6X795Lz6Bpb6R0GKf37g==" is-shared-array-buffer@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz#8f259c573b60b6a32d4058a1a07430c0a7344c79" + resolved "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz" integrity sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA== dependencies: call-bind "^1.0.2" is-stream@^2.0.0: version "2.0.1" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077" + resolved "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz" integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg== is-string@^1.0.5, is-string@^1.0.7: version "1.0.7" - resolved 
"https://registry.yarnpkg.com/is-string/-/is-string-1.0.7.tgz#0dd12bf2006f255bb58f695110eff7491eebc0fd" + resolved "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz" integrity sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg== dependencies: has-tostringtag "^1.0.0" is-symbol@^1.0.2, is-symbol@^1.0.3: version "1.0.4" - resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.4.tgz#a6dac93b635b063ca6872236de88910a57af139c" + resolved "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz" integrity sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg== dependencies: has-symbols "^1.0.2" is-typed-array@^1.1.10, is-typed-array@^1.1.9: version "1.1.11" - resolved "https://registry.yarnpkg.com/is-typed-array/-/is-typed-array-1.1.11.tgz#893621188e6919d4e6a488b9f6557d8c4b051953" - integrity sha512-l2SCJk9RflSWHQjOJJgNsV5FnE1pq/RpHnYW6ckSjTCYypv07SMbiRSCmLQD63WOv2eXaEwNsn+7kcn3csvYSw== + resolved "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.11.tgz" + integrity "sha1-iTYhGI5pGdTmpIi59lV9jEsFGVM= sha512-l2SCJk9RflSWHQjOJJgNsV5FnE1pq/RpHnYW6ckSjTCYypv07SMbiRSCmLQD63WOv2eXaEwNsn+7kcn3csvYSw==" dependencies: available-typed-arrays "^1.0.5" call-bind "^1.0.2" @@ -3063,76 +3134,76 @@ is-typed-array@^1.1.10, is-typed-array@^1.1.9: is-typed-array@^1.1.3: version "1.1.12" - resolved "https://registry.yarnpkg.com/is-typed-array/-/is-typed-array-1.1.12.tgz#d0bab5686ef4a76f7a73097b95470ab199c57d4a" - integrity sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg== + resolved "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.12.tgz" + integrity "sha1-0Lq1aG70p296cwl7lUcKsZnFfUo= sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg==" dependencies: which-typed-array "^1.1.11" is-typedarray@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" + resolved "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz" integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA== is-weakmap@^2.0.1: version "2.0.1" - resolved "https://registry.yarnpkg.com/is-weakmap/-/is-weakmap-2.0.1.tgz#5008b59bdc43b698201d18f62b37b2ca243e8cf2" - integrity sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA== + resolved "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.1.tgz" + integrity "sha1-UAi1m9xDtpggHRj2KzeyyiQ+jPI= sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA==" is-weakref@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/is-weakref/-/is-weakref-1.0.2.tgz#9529f383a9338205e89765e0392efc2f100f06f2" + resolved "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz" integrity sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ== dependencies: call-bind "^1.0.2" is-weakset@^2.0.1: version "2.0.2" - resolved "https://registry.yarnpkg.com/is-weakset/-/is-weakset-2.0.2.tgz#4569d67a747a1ce5a994dfd4ef6dcea76e7c0a1d" - integrity sha512-t2yVvttHkQktwnNNmBQ98AhENLdPUTDTE21uPqAQ0ARwQfGeQKRVS0NNurH7bTf7RrvcVn1OOge45CnBeHCSmg== + resolved "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.2.tgz" + integrity "sha1-RWnWenR6HOWplN/U723Op258Ch0= 
sha512-t2yVvttHkQktwnNNmBQ98AhENLdPUTDTE21uPqAQ0ARwQfGeQKRVS0NNurH7bTf7RrvcVn1OOge45CnBeHCSmg==" dependencies: call-bind "^1.0.2" get-intrinsic "^1.1.1" is-windows@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" + resolved "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz" integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== isarray@0.0.1: version "0.0.1" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" + resolved "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" integrity sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ== isarray@^1.0.0, isarray@~1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" + resolved "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ== isarray@^2.0.5: version "2.0.5" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-2.0.5.tgz#8af1e4c1221244cc62459faf38940d4e644a5723" - integrity sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw== + resolved "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz" + integrity "sha1-ivHkwSISRMxiRZ+vOJQNTmRKVyM= sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==" isexe@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" + resolved "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz" integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw== istanbul-lib-coverage@3.2.0, istanbul-lib-coverage@^3.0.0, istanbul-lib-coverage@^3.2.0: version "3.2.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz#189e7909d0a39fa5a3dfad5b03f71947770191d3" + resolved "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz" integrity sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw== istanbul-lib-hook@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-hook/-/istanbul-lib-hook-3.0.0.tgz#8f84c9434888cc6b1d0a9d7092a76d239ebf0cc6" + resolved "https://registry.npmjs.org/istanbul-lib-hook/-/istanbul-lib-hook-3.0.0.tgz" integrity sha512-Pt/uge1Q9s+5VAZ+pCo16TYMWPBIl+oaNIjgLQxcX0itS6ueeaA+pEfThZpH8WxhFgCiEb8sAJY6MdUKgiIWaQ== dependencies: append-transform "^2.0.0" istanbul-lib-instrument@^4.0.0: version "4.0.3" - resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz#873c6fff897450118222774696a3f28902d77c1d" + resolved "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz" integrity sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ== dependencies: "@babel/core" "^7.7.5" @@ -3142,7 +3213,7 @@ istanbul-lib-instrument@^4.0.0: istanbul-lib-processinfo@^2.0.2, istanbul-lib-processinfo@^2.0.3: version "2.0.3" - resolved "https://registry.yarnpkg.com/istanbul-lib-processinfo/-/istanbul-lib-processinfo-2.0.3.tgz#366d454cd0dcb7eb6e0e419378e60072c8626169" + resolved 
"https://registry.npmjs.org/istanbul-lib-processinfo/-/istanbul-lib-processinfo-2.0.3.tgz" integrity sha512-NkwHbo3E00oybX6NGJi6ar0B29vxyvNwoC7eJ4G4Yq28UfY758Hgn/heV8VRFhevPED4LXfFz0DQ8z/0kw9zMg== dependencies: archy "^1.0.0" @@ -3154,7 +3225,7 @@ istanbul-lib-processinfo@^2.0.2, istanbul-lib-processinfo@^2.0.3: istanbul-lib-report@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6" + resolved "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz" integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== dependencies: istanbul-lib-coverage "^3.0.0" @@ -3163,7 +3234,7 @@ istanbul-lib-report@^3.0.0: istanbul-lib-source-maps@^4.0.0: version "4.0.1" - resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz#895f3a709fcfba34c6de5a42939022f3e4358551" + resolved "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz" integrity sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw== dependencies: debug "^4.1.1" @@ -3172,7 +3243,7 @@ istanbul-lib-source-maps@^4.0.0: istanbul-reports@^3.0.2: version "3.1.5" - resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.1.5.tgz#cc9a6ab25cb25659810e4785ed9d9fb742578bae" + resolved "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.5.tgz" integrity sha512-nUsEMa9pBt/NOHqbcbeJEgqIlY/K7rVWUX6Lql2orY5e9roQOthbR3vtY4zzf2orPELg80fnxxk9zUyPlgwD1w== dependencies: html-escaper "^2.0.0" @@ -3180,48 +3251,48 @@ istanbul-reports@^3.0.2: iterall@^1.2.1: version "1.3.0" - resolved "https://registry.yarnpkg.com/iterall/-/iterall-1.3.0.tgz#afcb08492e2915cbd8a0884eb93a8c94d0d72fea" + resolved "https://registry.npmjs.org/iterall/-/iterall-1.3.0.tgz" integrity sha512-QZ9qOMdF+QLHxy1QIpUHUU1D5pS2CG2P69LF6L6CPjPYA/XMOmKV3PZpawHoAjHNyB0swdVTRxdYT4tbBbxqwg== jackspeak@^1.4.2: version "1.4.2" - resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-1.4.2.tgz#30ad5e4b7b36f9f3ae580e23272b1a386b4f6b93" + resolved "https://registry.npmjs.org/jackspeak/-/jackspeak-1.4.2.tgz" integrity sha512-GHeGTmnuaHnvS+ZctRB01bfxARuu9wW83ENbuiweu07SFcVlZrJpcshSre/keGT7YGBhLHg/+rXCNSrsEHKU4Q== dependencies: cliui "^7.0.4" jest-docblock@^29.7.0: version "29.7.0" - resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-29.7.0.tgz#8fddb6adc3cdc955c93e2a87f61cfd350d5d119a" - integrity sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g== + resolved "https://registry.npmjs.org/jest-docblock/-/jest-docblock-29.7.0.tgz" + integrity "sha1-j922rcPNyVXJPiqH9hz9NQ1dEZo= sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g==" dependencies: detect-newline "^3.0.0" jmespath@0.16.0: version "0.16.0" - resolved "https://registry.yarnpkg.com/jmespath/-/jmespath-0.16.0.tgz#b15b0a85dfd4d930d43e69ed605943c802785076" - integrity sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw== + resolved "https://registry.npmjs.org/jmespath/-/jmespath-0.16.0.tgz" + integrity "sha1-sVsKhd/U2TDUPmntYFlDyAJ4UHY= sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw==" js-sdsl@^4.1.4: version "4.1.4" - resolved "https://registry.yarnpkg.com/js-sdsl/-/js-sdsl-4.1.4.tgz#78793c90f80e8430b7d8dc94515b6c77d98a26a6" + resolved 
"https://registry.npmjs.org/js-sdsl/-/js-sdsl-4.1.4.tgz" integrity sha512-Y2/yD55y5jteOAmY50JbUZYwk3CP3wnLPEZnlR1w9oKhITrBEtAxwuWKebFf8hMrPMgbYwFoWK/lH2sBkErELw== "js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" + resolved "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== js-yaml@4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.0.0.tgz#f426bc0ff4b4051926cd588c71113183409a121f" + resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-4.0.0.tgz" integrity sha512-pqon0s+4ScYUvX30wxQi3PogGFAlUyH0awepWvwkj4jD4v+ova3RiYw8bmA6x2rDrEaj8i/oWKoRxpVNW+Re8Q== dependencies: argparse "^2.0.1" js-yaml@^3.13.1: version "3.14.1" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" + resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz" integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== dependencies: argparse "^1.0.7" @@ -3229,51 +3300,51 @@ js-yaml@^3.13.1: js-yaml@^4.1.0: version "4.1.0" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" + resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz" integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== dependencies: argparse "^2.0.1" jsesc@^2.5.1: version "2.5.2" - resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" + resolved "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz" integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== json-schema-traverse@^0.4.1: version "0.4.1" - resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + resolved "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz" integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== json-stable-stringify-without-jsonify@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + resolved "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz" integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== json-stringify-safe@^5.0.1: version "5.0.1" - resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" + resolved "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz" integrity sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA== json5@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe" - integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow== + version "1.0.2" + resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.2.tgz#63d98d60f21b313b77c4d6da18bfa69d80e1d593" + integrity 
sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA== dependencies: minimist "^1.2.0" json5@^2.2.1: version "2.2.1" - resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.1.tgz#655d50ed1e6f95ad1a3caababd2b0efda10b395c" + resolved "https://registry.npmjs.org/json5/-/json5-2.2.1.tgz" integrity sha512-1hqLFMSrGHRHxav9q9gNjJ5EXznIxGVO09xQRrwplcS8qs28pZ8s8hupZAmqDwZUmVZ2Qb2jnyPOWcDH8m8dlA== -json5@^2.2.2: +json5@^2.2.3: version "2.2.3" resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.3.tgz#78cd6f1a19bdc12b73db5ad0c61efd66c1e29283" integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== jszip@^3.5.0: version "3.10.1" - resolved "https://registry.yarnpkg.com/jszip/-/jszip-3.10.1.tgz#34aee70eb18ea1faec2f589208a157d1feb091c2" + resolved "https://registry.npmjs.org/jszip/-/jszip-3.10.1.tgz" integrity sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g== dependencies: lie "~3.3.0" @@ -3283,12 +3354,12 @@ jszip@^3.5.0: just-extend@^4.0.2: version "4.2.1" - resolved "https://registry.yarnpkg.com/just-extend/-/just-extend-4.2.1.tgz#ef5e589afb61e5d66b24eca749409a8939a8c744" + resolved "https://registry.npmjs.org/just-extend/-/just-extend-4.2.1.tgz" integrity sha512-g3UB796vUFIY90VIv/WX3L2c8CS2MdWUww3CNrYmqza1Fg0DURc2K/O4YrnklBdQarSJ/y8JnJYDGc+1iumQjg== knex@^2.4.2: version "2.4.2" - resolved "https://registry.yarnpkg.com/knex/-/knex-2.4.2.tgz#a34a289d38406dc19a0447a78eeaf2d16ebedd61" + resolved "https://registry.npmjs.org/knex/-/knex-2.4.2.tgz" integrity sha512-tMI1M7a+xwHhPxjbl/H9K1kHX+VncEYcvCx5K00M16bWvpYPKAZd6QrCu68PtHAdIZNQPWZn0GVhqVBEthGWCg== dependencies: colorette "2.0.19" @@ -3308,12 +3379,12 @@ knex@^2.4.2: koalas@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/koalas/-/koalas-1.0.2.tgz#318433f074235db78fae5661a02a8ca53ee295cd" + resolved "https://registry.npmjs.org/koalas/-/koalas-1.0.2.tgz" integrity sha512-RYhBbYaTTTHId3l6fnMZc3eGQNW6FVCqMG6AMwA5I1Mafr6AflaXeoi6x3xQuATRotGYRLk6+1ELZH4dstFNOA== levn@^0.4.1: version "0.4.1" - resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" + resolved "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz" integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== dependencies: prelude-ls "^1.2.1" @@ -3321,7 +3392,7 @@ levn@^0.4.1: libtap@^1.4.0: version "1.4.0" - resolved "https://registry.yarnpkg.com/libtap/-/libtap-1.4.0.tgz#5c6dea65d2d95f2c855d819a457e1fa7d2af5bf0" + resolved "https://registry.npmjs.org/libtap/-/libtap-1.4.0.tgz" integrity sha512-STLFynswQ2A6W14JkabgGetBNk6INL1REgJ9UeNKw5llXroC2cGLgKTqavv0sl8OLVztLLipVKMcQ7yeUcqpmg== dependencies: async-hook-domain "^2.0.4" @@ -3340,150 +3411,150 @@ libtap@^1.4.0: lie@~3.3.0: version "3.3.0" - resolved "https://registry.yarnpkg.com/lie/-/lie-3.3.0.tgz#dcf82dee545f46074daf200c7c1c5a08e0f40f6a" + resolved "https://registry.npmjs.org/lie/-/lie-3.3.0.tgz" integrity sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ== dependencies: immediate "~3.0.5" limiter@^1.1.4: version "1.1.5" - resolved "https://registry.yarnpkg.com/limiter/-/limiter-1.1.5.tgz#8f92a25b3b16c6131293a0cc834b4a838a2aa7c2" + resolved "https://registry.npmjs.org/limiter/-/limiter-1.1.5.tgz" integrity sha512-FWWMIEOxz3GwUI4Ts/IvgVy6LPvoMPgjMdQ185nN6psJyBJ4yOpzqm695/h5umdLJg2vW3GR5iG11MAkR2AzJA== locate-path@^5.0.0: version "5.0.0" - resolved 
"https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" + resolved "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz" integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== dependencies: p-locate "^4.1.0" locate-path@^6.0.0: version "6.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286" + resolved "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz" integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw== dependencies: p-locate "^5.0.0" lodash.flattendeep@^4.4.0: version "4.4.0" - resolved "https://registry.yarnpkg.com/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz#fb030917f86a3134e5bc9bec0d69e0013ddfedb2" + resolved "https://registry.npmjs.org/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz" integrity sha512-uHaJFihxmJcEX3kT4I23ABqKKalJ/zDrDg0lsFtc1h+3uw49SIJ5beyhx5ExVRti3AvKoOJngIj7xz3oylPdWQ== lodash.get@^4.4.2: version "4.4.2" - resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99" + resolved "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz" integrity sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ== lodash.kebabcase@^4.1.1: version "4.1.1" - resolved "https://registry.yarnpkg.com/lodash.kebabcase/-/lodash.kebabcase-4.1.1.tgz#8489b1cb0d29ff88195cceca448ff6d6cc295c36" + resolved "https://registry.npmjs.org/lodash.kebabcase/-/lodash.kebabcase-4.1.1.tgz" integrity sha512-N8XRTIMMqqDgSy4VLKPnJ/+hpGZN+PHQiJnSenYqPaVV/NCqEogTnAdZLQiGKhxX+JCs8waWq2t1XHWKOmlY8g== lodash.merge@^4.6.2: version "4.6.2" - resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + resolved "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz" integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== lodash.pick@^4.4.0: version "4.4.0" - resolved "https://registry.yarnpkg.com/lodash.pick/-/lodash.pick-4.4.0.tgz#52f05610fff9ded422611441ed1fc123a03001b3" + resolved "https://registry.npmjs.org/lodash.pick/-/lodash.pick-4.4.0.tgz" integrity sha512-hXt6Ul/5yWjfklSGvLQl8vM//l3FtyHZeuelpzK6mm99pNvN9yTDruNZPEJZD1oWrqo+izBmB7oUfWgcCX7s4Q== lodash.sortby@^4.7.0: version "4.7.0" - resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" + resolved "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz" integrity sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA== lodash.uniq@^4.5.0: version "4.5.0" - resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773" + resolved "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz" integrity sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ== lodash@^4.17.13, lodash@^4.17.20, lodash@^4.17.21, lodash@^4.17.4: version "4.17.21" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" + resolved "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== log-symbols@4.0.0: version "4.0.0" - resolved 
"https://registry.yarnpkg.com/log-symbols/-/log-symbols-4.0.0.tgz#69b3cc46d20f448eccdb75ea1fa733d9e821c920" + resolved "https://registry.npmjs.org/log-symbols/-/log-symbols-4.0.0.tgz" integrity sha512-FN8JBzLx6CzeMrB0tg6pqlGU1wCrXW+ZXGH481kfsBqer0hToTIiHdjH4Mq8xJUbvATujKCvaREGWpGUionraA== dependencies: chalk "^4.0.0" long@^5.0.0: version "5.2.0" - resolved "https://registry.yarnpkg.com/long/-/long-5.2.0.tgz#2696dadf4b4da2ce3f6f6b89186085d94d52fd61" + resolved "https://registry.npmjs.org/long/-/long-5.2.0.tgz" integrity sha512-9RTUNjK60eJbx3uz+TEGF7fUr29ZDxR5QzXcyDpeSfeH28S9ycINflOgOlppit5U+4kNTe83KQnMEerw7GmE8w== loose-envify@^1.1.0: version "1.4.0" - resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" + resolved "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz" integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== dependencies: js-tokens "^3.0.0 || ^4.0.0" loupe@^2.3.1: version "2.3.4" - resolved "https://registry.yarnpkg.com/loupe/-/loupe-2.3.4.tgz#7e0b9bffc76f148f9be769cb1321d3dcf3cb25f3" + resolved "https://registry.npmjs.org/loupe/-/loupe-2.3.4.tgz" integrity sha512-OvKfgCC2Ndby6aSTREl5aCCPTNIzlDfQZvZxNUrBrihDhL3xcrYegTblhmEiCrg2kKQz4XsFIaemE5BF4ybSaQ== dependencies: get-func-name "^2.0.0" lru-cache@^5.1.1: version "5.1.1" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" + resolved "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz" integrity sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w== dependencies: yallist "^3.0.2" lru-cache@^6.0.0: version "6.0.0" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + resolved "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz" integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== dependencies: yallist "^4.0.0" lru-cache@^7.14.0: version "7.14.0" - resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-7.14.0.tgz#21be64954a4680e303a09e9468f880b98a0b3c7f" + resolved "https://registry.npmjs.org/lru-cache/-/lru-cache-7.14.0.tgz" integrity sha512-EIRtP1GrSJny0dqb50QXRUNBxHJhcpxHC++M5tD7RYbvLLn5KVWKsbyswSSqDuU15UFi3bgTQIY8nhDMeF6aDQ== make-dir@^3.0.0, make-dir@^3.0.2: version "3.1.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" + resolved "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz" integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== dependencies: semver "^6.0.0" manage-path@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/manage-path/-/manage-path-2.0.0.tgz#f4cf8457b926eeee2a83b173501414bc76eb9597" + resolved "https://registry.npmjs.org/manage-path/-/manage-path-2.0.0.tgz" integrity sha512-NJhyB+PJYTpxhxZJ3lecIGgh4kwIY2RAh44XvAz9UlqthlQwtPBf62uBVR8XaD8CRuSjQ6TnZH2lNJkbLPZM2A== media-typer@0.3.0: version "0.3.0" - resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" + resolved "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz" integrity sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ== merge-descriptors@1.0.1, merge-descriptors@~1.0.0: version "1.0.1" - resolved 
"https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" + resolved "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz" integrity sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w== merge2@^1.3.0, merge2@^1.4.1: version "1.4.1" - resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" + resolved "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz" integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== methods@^1.1.2, methods@~1.1.2: version "1.1.2" - resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" + resolved "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz" integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== micromatch@^4.0.4: version "4.0.5" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.5.tgz#bc8999a7cbbf77cdc89f132f6e467051b49090c6" + resolved "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz" integrity sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA== dependencies: braces "^3.0.2" @@ -3491,82 +3562,82 @@ micromatch@^4.0.4: mime-db@1.52.0: version "1.52.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" + resolved "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz" integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== mime-types@^2.1.12, mime-types@~2.1.24, mime-types@~2.1.34: version "2.1.35" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" + resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz" integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== dependencies: mime-db "1.52.0" mime@1.6.0: version "1.6.0" - resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" + resolved "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz" integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== mimic-fn@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" + resolved "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz" integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== minimatch@3.0.4: version "3.0.4" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" + resolved "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz" integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== dependencies: brace-expansion "^1.1.7" minimatch@^3.0.4, minimatch@^3.1.1, minimatch@^3.1.2: version "3.1.2" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" + resolved "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz" integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== dependencies: brace-expansion "^1.1.7" minimist@^1.2.0: version "1.2.6" - resolved 
"https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" + resolved "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz" integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== minimist@^1.2.6: version "1.2.7" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.7.tgz#daa1c4d91f507390437c6a8bc01078e7000c4d18" + resolved "https://registry.npmjs.org/minimist/-/minimist-1.2.7.tgz" integrity sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g== minimist@^1.2.8: version "1.2.8" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.8.tgz#c1a464e7693302e082a075cee0c057741ac4772c" - integrity sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA== + resolved "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz" + integrity "sha1-waRk52kzAuCCoHXO4MBXdBrEdyw= sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==" minipass@^3.1.5, minipass@^3.1.6, minipass@^3.3.4: version "3.3.6" - resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.3.6.tgz#7bba384db3a1520d18c9c0e5251c3444e95dd94a" + resolved "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz" integrity sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw== dependencies: yallist "^4.0.0" mkdirp@^0.5.0, mkdirp@^0.5.4: version "0.5.6" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.6.tgz#7def03d2432dcae4ba1d611445c48396062255f6" + resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz" integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== dependencies: minimist "^1.2.6" mkdirp@^1.0.4: version "1.0.4" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" + resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz" integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== mkdirp@^3.0.1: version "3.0.1" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-3.0.1.tgz#e44e4c5607fb279c168241713cc6e0fea9adcb50" - integrity sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg== + resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz" + integrity "sha1-5E5MVgf7J5wWgkFxPMbg/qmty1A= sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==" mocha@8: version "8.4.0" - resolved "https://registry.yarnpkg.com/mocha/-/mocha-8.4.0.tgz#677be88bf15980a3cae03a73e10a0fc3997f0cff" + resolved "https://registry.npmjs.org/mocha/-/mocha-8.4.0.tgz" integrity sha512-hJaO0mwDXmZS4ghXsvPVriOhsxQ7ofcpQdm8dE+jISUOKopitvnXFQmpRR7jd2K6VBG6E26gU3IAbXXGIbu4sQ== dependencies: "@ungap/promise-all-settled" "1.1.2" @@ -3597,32 +3668,32 @@ mocha@8: module-details-from-path@^1.0.3: version "1.0.3" - resolved "https://registry.yarnpkg.com/module-details-from-path/-/module-details-from-path-1.0.3.tgz#114c949673e2a8a35e9d35788527aa37b679da2b" + resolved "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz" integrity sha512-ySViT69/76t8VhE1xXHK6Ch4NcDd26gx0MzKXLO+F7NOtnqH68d9zF94nT8ZWSxXh8ELOERsnJO/sWt1xZYw5A== module-not-found-error@^1.0.0: version "1.0.1" - resolved 
"https://registry.yarnpkg.com/module-not-found-error/-/module-not-found-error-1.0.1.tgz#cf8b4ff4f29640674d6cdd02b0e3bc523c2bbdc0" + resolved "https://registry.npmjs.org/module-not-found-error/-/module-not-found-error-1.0.1.tgz" integrity sha512-pEk4ECWQXV6z2zjhRZUongnLJNUeGQJ3w6OQ5ctGwD+i5o93qjRQUk2Rt6VdNeu3sEP0AB4LcfvdebpxBRVr4g== ms@2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" + resolved "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz" integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== -ms@2.1.2: +ms@2.1.2, ms@^2.1.2: version "2.1.2" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" + resolved "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== -ms@2.1.3, ms@^2.1.1, ms@^2.1.2: +ms@2.1.3, ms@^2.1.1: version "2.1.3" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2" + resolved "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz" integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== msgpack-lite@^0.1.26: version "0.1.26" - resolved "https://registry.yarnpkg.com/msgpack-lite/-/msgpack-lite-0.1.26.tgz#dd3c50b26f059f25e7edee3644418358e2a9ad89" + resolved "https://registry.npmjs.org/msgpack-lite/-/msgpack-lite-0.1.26.tgz" integrity sha512-SZ2IxeqZ1oRFGo0xFGbvBJWMp3yLIY9rlIJyxy8CGrwZn1f0ZK4r6jV/AM1r0FZMDUkWkglOk/eeKIL9g77Nxw== dependencies: event-lite "^0.1.1" @@ -3632,7 +3703,7 @@ msgpack-lite@^0.1.26: multer@^1.4.5-lts.1: version "1.4.5-lts.1" - resolved "https://registry.yarnpkg.com/multer/-/multer-1.4.5-lts.1.tgz#803e24ad1984f58edffbc79f56e305aec5cfd1ac" + resolved "https://registry.npmjs.org/multer/-/multer-1.4.5-lts.1.tgz" integrity sha512-ywPWvcDMeH+z9gQq5qYHCCy+ethsk4goepZ45GLD63fOu0YcNecQxi64nDs3qluZB+murG3/D4dJ7+dGctcCQQ== dependencies: append-field "^1.0.0" @@ -3645,23 +3716,23 @@ multer@^1.4.5-lts.1: nanoid@3.1.20: version "3.1.20" - resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.1.20.tgz#badc263c6b1dcf14b71efaa85f6ab4c1d6cfc788" + resolved "https://registry.npmjs.org/nanoid/-/nanoid-3.1.20.tgz" integrity sha512-a1cQNyczgKbLX9jwbS/+d7W8fX/RfgYR7lVWwWOGIPNgK2m0MWvrGF6/m4kk6U3QcFMnZf3RIhL0v2Jgh/0Uxw== natural-compare@^1.4.0: version "1.4.0" - resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + resolved "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz" integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== negotiator@0.6.3: version "0.6.3" - resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" + resolved "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz" integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== nise@^5.1.4: version "5.1.4" - resolved "https://registry.yarnpkg.com/nise/-/nise-5.1.4.tgz#491ce7e7307d4ec546f5a659b2efe94a18b4bbc0" - integrity sha512-8+Ib8rRJ4L0o3kfmyVCL7gzrohyDe0cMFTBa2d364yIrEGMEoetznKJx899YxjybU6bL9SQkYPSBBs1gyYs8Xg== + resolved "https://registry.npmjs.org/nise/-/nise-5.1.4.tgz" + integrity "sha1-SRzn5zB9TsVG9aZZsu/pShi0u8A= 
sha512-8+Ib8rRJ4L0o3kfmyVCL7gzrohyDe0cMFTBa2d364yIrEGMEoetznKJx899YxjybU6bL9SQkYPSBBs1gyYs8Xg==" dependencies: "@sinonjs/commons" "^2.0.0" "@sinonjs/fake-timers" "^10.0.2" @@ -3671,7 +3742,7 @@ nise@^5.1.4: nock@^11.3.3: version "11.9.1" - resolved "https://registry.yarnpkg.com/nock/-/nock-11.9.1.tgz#2b026c5beb6d0dbcb41e7e4cefa671bc36db9c61" + resolved "https://registry.npmjs.org/nock/-/nock-11.9.1.tgz" integrity sha512-U5wPctaY4/ar2JJ5Jg4wJxlbBfayxgKbiAeGh+a1kk6Pwnc2ZEuKviLyDSG6t0uXl56q7AALIxoM6FJrBSsVXA== dependencies: debug "^4.1.0" @@ -3682,44 +3753,49 @@ nock@^11.3.3: node-abort-controller@^3.1.1: version "3.1.1" - resolved "https://registry.yarnpkg.com/node-abort-controller/-/node-abort-controller-3.1.1.tgz#a94377e964a9a37ac3976d848cb5c765833b8548" + resolved "https://registry.npmjs.org/node-abort-controller/-/node-abort-controller-3.1.1.tgz" integrity sha512-AGK2yQKIjRuqnc6VkX2Xj5d+QW8xZ87pa1UK6yA6ouUyuxfHuMP6umE5QK7UmTeOAymo+Zx1Fxiuw9rVx8taHQ== node-addon-api@^6.1.0: version "6.1.0" - resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-6.1.0.tgz#ac8470034e58e67d0c6f1204a18ae6995d9c0d76" + resolved "https://registry.npmjs.org/node-addon-api/-/node-addon-api-6.1.0.tgz" integrity sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA== node-gyp-build@<4.0, node-gyp-build@^3.9.0: version "3.9.0" - resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-3.9.0.tgz#53a350187dd4d5276750da21605d1cb681d09e25" + resolved "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-3.9.0.tgz" integrity sha512-zLcTg6P4AbcHPq465ZMFNXx7XpKKJh+7kkN699NiQWisR2uWYOWNWqRHAmbnmKiL4e9aLSlmy5U7rEMUXV59+A== node-gyp-build@^4.5.0: version "4.5.0" - resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.5.0.tgz#7a64eefa0b21112f89f58379da128ac177f20e40" + resolved "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.5.0.tgz" integrity sha512-2iGbaQBV+ITgCz76ZEjmhUKAKVf7xfY1sRl4UiKQspfZMH2h06SyhNsnSVy50cwkFQDGLyif6m/6uFXHkOZ6rg== node-preload@^0.2.1: version "0.2.1" - resolved "https://registry.yarnpkg.com/node-preload/-/node-preload-0.2.1.tgz#c03043bb327f417a18fee7ab7ee57b408a144301" + resolved "https://registry.npmjs.org/node-preload/-/node-preload-0.2.1.tgz" integrity sha512-RM5oyBy45cLEoHqCeh+MNuFAxO0vTFBLskvQbOKnEE7YTTSN4tbN8QWDIPQ6L+WvKsB/qLEGpYe2ZZ9d4W9OIQ== dependencies: process-on-spawn "^1.0.0" +node-releases@^2.0.14: + version "2.0.14" + resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.14.tgz#2ffb053bceb8b2be8495ece1ab6ce600c4461b0b" + integrity "sha1-L/sFO864sr6Elezhq2zmAMRGGws= sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==" + node-releases@^2.0.6: version "2.0.6" - resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.6.tgz#8a7088c63a55e493845683ebf3c828d8c51c5503" + resolved "https://registry.npmjs.org/node-releases/-/node-releases-2.0.6.tgz" integrity sha512-PiVXnNuFm5+iYkLBNeq5211hvO38y63T0i2KKh2KnUs3RpzJ+JtODFjkD8yjLwnDkTYF1eKXheUwdssR+NRZdg== normalize-path@^3.0.0, normalize-path@~3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" + resolved "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz" integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== nyc@^15.1.0: version "15.1.0" - resolved 
"https://registry.yarnpkg.com/nyc/-/nyc-15.1.0.tgz#1335dae12ddc87b6e249d5a1994ca4bdaea75f02" + resolved "https://registry.npmjs.org/nyc/-/nyc-15.1.0.tgz" integrity sha512-jMW04n9SxKdKi1ZMGhvUTHBN0EICCRkHemEoE5jm6mTYcqcdas0ATzgUgejlQUHMvpnOZqGB5Xxsv9KxJW1j8A== dependencies: "@istanbuljs/load-nyc-config" "^1.0.0" @@ -3752,22 +3828,22 @@ nyc@^15.1.0: object-assign@^4.1.0, object-assign@^4.1.1: version "4.1.1" - resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" + resolved "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz" integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== object-inspect@^1.12.2, object-inspect@^1.9.0: version "1.12.2" - resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.2.tgz#c0641f26394532f28ab8d796ab954e43c009a8ea" + resolved "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz" integrity sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ== object-inspect@^1.12.3: version "1.12.3" - resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.3.tgz#ba62dffd67ee256c8c086dfae69e016cd1f198b9" - integrity sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g== + resolved "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz" + integrity "sha1-umLf/WfuJWyMCG365p4BbNHxmLk= sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==" object-is@^1.1.5: version "1.1.5" - resolved "https://registry.yarnpkg.com/object-is/-/object-is-1.1.5.tgz#b9deeaa5fc7f1846a0faecdceec138e5778f53ac" + resolved "https://registry.npmjs.org/object-is/-/object-is-1.1.5.tgz" integrity sha512-3cyDsyHgtmi7I7DfSSI2LDp6SK2lwvtbg0p0R1e0RvTqF5ceGx+K2dfSjm1bKDMVCFEDAQvy+o8c6a7VujOddw== dependencies: call-bind "^1.0.2" @@ -3775,12 +3851,12 @@ object-is@^1.1.5: object-keys@^1.1.1: version "1.1.1" - resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" + resolved "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz" integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== object.assign@^4.1.4: version "4.1.4" - resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.4.tgz#9673c7c7c351ab8c4d0b516f4343ebf4dfb7799f" + resolved "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz" integrity sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ== dependencies: call-bind "^1.0.2" @@ -3790,7 +3866,7 @@ object.assign@^4.1.4: object.values@^1.1.5: version "1.1.5" - resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.5.tgz#959f63e3ce9ef108720333082131e4a459b716ac" + resolved "https://registry.npmjs.org/object.values/-/object.values-1.1.5.tgz" integrity sha512-QUZRW0ilQ3PnPpbNtgdNV1PDbEqLIiSFB3l+EnGtBQ/8SUTLj1PZwtQHABZtLgwpJZTSZhuGLOGk57Drx2IvYg== dependencies: call-bind "^1.0.2" @@ -3799,50 +3875,50 @@ object.values@^1.1.5: on-finished@2.4.1: version "2.4.1" - resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f" + resolved "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz" integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== dependencies: ee-first 
"1.1.1" on-net-listen@^1.1.1: version "1.1.2" - resolved "https://registry.yarnpkg.com/on-net-listen/-/on-net-listen-1.1.2.tgz#671e55a81c910fa7e5b1e4d506545e9ea0f2e11c" + resolved "https://registry.npmjs.org/on-net-listen/-/on-net-listen-1.1.2.tgz" integrity sha512-y1HRYy8s/RlcBvDUwKXSmkODMdx4KSuIvloCnQYJ2LdBBC1asY4HtfhXwe3UWknLakATZDnbzht2Ijw3M1EqFg== once@^1.3.0: version "1.4.0" - resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" + resolved "https://registry.npmjs.org/once/-/once-1.4.0.tgz" integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w== dependencies: wrappy "1" onetime@^5.1.0: version "5.1.2" - resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" + resolved "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz" integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== dependencies: mimic-fn "^2.1.0" opener@^1.5.1: version "1.5.2" - resolved "https://registry.yarnpkg.com/opener/-/opener-1.5.2.tgz#5d37e1f35077b9dcac4301372271afdeb2a13598" + resolved "https://registry.npmjs.org/opener/-/opener-1.5.2.tgz" integrity sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A== opentracing@>=0.12.1: version "0.14.7" - resolved "https://registry.yarnpkg.com/opentracing/-/opentracing-0.14.7.tgz#25d472bd0296dc0b64d7b94cbc995219031428f5" + resolved "https://registry.npmjs.org/opentracing/-/opentracing-0.14.7.tgz" integrity sha512-vz9iS7MJ5+Bp1URw8Khvdyw1H/hGvzHWlKQ7eRrQojSCDL1/SrWfrY9QebLw97n2deyRtzHRC3MkQfVNUCo91Q== optimist@~0.3.5: version "0.3.7" - resolved "https://registry.yarnpkg.com/optimist/-/optimist-0.3.7.tgz#c90941ad59e4273328923074d2cf2e7cbc6ec0d9" + resolved "https://registry.npmjs.org/optimist/-/optimist-0.3.7.tgz" integrity sha512-TCx0dXQzVtSCg2OgY/bO9hjM9cV4XYx09TVK+s3+FhkjT6LovsLe+pPMzpWf+6yXK/hUizs2gUoTw3jHM0VaTQ== dependencies: wordwrap "~0.0.2" optionator@^0.9.1: version "0.9.1" - resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" + resolved "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz" integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== dependencies: deep-is "^0.1.3" @@ -3854,59 +3930,59 @@ optionator@^0.9.1: own-or-env@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/own-or-env/-/own-or-env-1.0.2.tgz#84e78d2d5128f7ee8a59f741ad5aafb4256a7c89" + resolved "https://registry.npmjs.org/own-or-env/-/own-or-env-1.0.2.tgz" integrity sha512-NQ7v0fliWtK7Lkb+WdFqe6ky9XAzYmlkXthQrBbzlYbmFKoAYbDDcwmOm6q8kOuwSRXW8bdL5ORksploUJmWgw== dependencies: own-or "^1.0.0" own-or@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/own-or/-/own-or-1.0.0.tgz#4e877fbeda9a2ec8000fbc0bcae39645ee8bf8dc" + resolved "https://registry.npmjs.org/own-or/-/own-or-1.0.0.tgz" integrity sha512-NfZr5+Tdf6MB8UI9GLvKRs4cXY8/yB0w3xtt84xFdWy8hkGjn+JFc60VhzS/hFRfbyxFcGYMTjnF4Me+RbbqrA== p-limit@^2.2.0: version "2.3.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" + resolved "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz" integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== dependencies: p-try "^2.0.0" p-limit@^3.0.2, p-limit@^3.1.0: version "3.1.0" - resolved 
"https://registry.yarnpkg.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b" + resolved "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz" integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ== dependencies: yocto-queue "^0.1.0" p-locate@^4.1.0: version "4.1.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" + resolved "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz" integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== dependencies: p-limit "^2.2.0" p-locate@^5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834" + resolved "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz" integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw== dependencies: p-limit "^3.0.2" p-map@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/p-map/-/p-map-3.0.0.tgz#d704d9af8a2ba684e2600d9a215983d4141a979d" + resolved "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz" integrity sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ== dependencies: aggregate-error "^3.0.0" p-try@^2.0.0: version "2.2.0" - resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" + resolved "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz" integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== package-hash@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/package-hash/-/package-hash-4.0.0.tgz#3537f654665ec3cc38827387fc904c163c54f506" + resolved "https://registry.npmjs.org/package-hash/-/package-hash-4.0.0.tgz" integrity sha512-whdkPIooSu/bASggZ96BWVvZTRMOFxnyUG5PnTSGKoJE2gd5mbVNmR2Nj20QFzxYYgAXpoqC+AiXzl+UMRh7zQ== dependencies: graceful-fs "^4.1.15" @@ -3916,136 +3992,136 @@ package-hash@^4.0.0: pako@^1.0.3, pako@~1.0.2: version "1.0.11" - resolved "https://registry.yarnpkg.com/pako/-/pako-1.0.11.tgz#6c9599d340d54dfd3946380252a35705a6b992bf" + resolved "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz" integrity sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw== parent-module@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" + resolved "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz" integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== dependencies: callsites "^3.0.0" parseurl@~1.3.3: version "1.3.3" - resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" + resolved "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz" integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== patch-console@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/patch-console/-/patch-console-1.0.0.tgz#19b9f028713feb8a3c023702a8cc8cb9f7466f9d" + resolved "https://registry.npmjs.org/patch-console/-/patch-console-1.0.0.tgz" integrity sha512-nxl9nrnLQmh64iTzMfyylSlRozL7kAXIaxw1fVcLYdyhNkJCRUzirRZTikXGJsg+hc4fqpneTK6iU2H1Q8THSA== path-exists@^4.0.0: version "4.0.0" - resolved 
"https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" + resolved "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz" integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== path-is-absolute@^1.0.0: version "1.0.1" - resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" + resolved "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg== path-key@^3.1.0: version "3.1.1" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + resolved "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz" integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== path-parse@^1.0.7: version "1.0.7" - resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" + resolved "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== path-to-regexp@0.1.7, path-to-regexp@^0.1.2: version "0.1.7" - resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" + resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz" integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== path-to-regexp@^1.7.0: version "1.8.0" - resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-1.8.0.tgz#887b3ba9d84393e87a0a0b9f4cb756198b53548a" + resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz" integrity sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA== dependencies: isarray "0.0.1" path-type@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" + resolved "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz" integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== pathval@^1.1.1: version "1.1.1" - resolved "https://registry.yarnpkg.com/pathval/-/pathval-1.1.1.tgz#8534e77a77ce7ac5a2512ea21e0fdb8fcf6c3d8d" + resolved "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz" integrity sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ== pg-connection-string@2.5.0: version "2.5.0" - resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.5.0.tgz#538cadd0f7e603fc09a12590f3b8a452c2c0cf34" + resolved "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.5.0.tgz" integrity sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ== picocolors@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c" + resolved "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz" integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ== picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.3.1: version "2.3.1" - resolved 
"https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42" + resolved "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz" integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== pkg-dir@^4.1.0: version "4.2.0" - resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" + resolved "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz" integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== dependencies: find-up "^4.0.0" platform@^1.3.3: version "1.3.6" - resolved "https://registry.yarnpkg.com/platform/-/platform-1.3.6.tgz#48b4ce983164b209c2d45a107adb31f473a6e7a7" + resolved "https://registry.npmjs.org/platform/-/platform-1.3.6.tgz" integrity sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg== pprof-format@^2.0.7: version "2.0.7" - resolved "https://registry.yarnpkg.com/pprof-format/-/pprof-format-2.0.7.tgz#526e4361f8b37d16b2ec4bb0696b5292de5046a4" + resolved "https://registry.npmjs.org/pprof-format/-/pprof-format-2.0.7.tgz" integrity sha512-1qWaGAzwMpaXJP9opRa23nPnt2Egi7RMNoNBptEE/XwHbcn4fC2b/4U4bKc5arkGkIh2ZabpF2bEb+c5GNHEKA== prelude-ls@^1.2.1: version "1.2.1" - resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" + resolved "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz" integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== pretty-bytes@^5.3.0: version "5.6.0" - resolved "https://registry.yarnpkg.com/pretty-bytes/-/pretty-bytes-5.6.0.tgz#356256f643804773c82f64723fe78c92c62beaeb" + resolved "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-5.6.0.tgz" integrity sha512-FFw039TmrBqFK8ma/7OL3sDz/VytdtJr044/QUJtH0wK9lb9jLq9tJyIxUwtQJHwar2BqtiA4iCWSwo9JLkzFg== process-nextick-args@~2.0.0: version "2.0.1" - resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" + resolved "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz" integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== process-on-spawn@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/process-on-spawn/-/process-on-spawn-1.0.0.tgz#95b05a23073d30a17acfdc92a440efd2baefdc93" + resolved "https://registry.npmjs.org/process-on-spawn/-/process-on-spawn-1.0.0.tgz" integrity sha512-1WsPDsUSMmZH5LeMLegqkPDrsGgsWwk1Exipy2hvB0o/F0ASzbpIctSCcZIK1ykJvtTJULEH+20WOFjMvGnCTg== dependencies: fromentries "^1.2.0" progress@^2.0.3: version "2.0.3" - resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" + resolved "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz" integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== propagate@^2.0.0: version "2.0.1" - resolved "https://registry.yarnpkg.com/propagate/-/propagate-2.0.1.tgz#40cdedab18085c792334e64f0ac17256d38f9a45" + resolved "https://registry.npmjs.org/propagate/-/propagate-2.0.1.tgz" integrity sha512-vGrhOavPSTz4QVNuBNdcNXePNdNMaO1xj9yBeH1ScQPjk/rhg9sSlCXPhMkFuaNNW/syTvYqsnbIJxMBfRbbag== -protobufjs@^7.2.4: - version "7.2.4" - resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-7.2.4.tgz#3fc1ec0cdc89dd91aef9ba6037ba07408485c3ae" - 
integrity sha512-AT+RJgD2sH8phPmCf7OUZR8xGdcJRga4+1cOaXJ64hvcSkVhNcRHOwIxUatPH15+nj59WAGTDv3LSGZPEQbJaQ== +protobufjs@^7.2.5: + version "7.2.5" + resolved "https://registry.npmjs.org/protobufjs/-/protobufjs-7.2.5.tgz" + integrity "sha1-RdXFc4em0poXqraEbcwoP5uOfy0= sha512-gGXRSXvxQ7UiPgfw8gevrfRWcTlSbOFg+p/N+JVJEK5VhueL2miT6qTymqAmjr1Q5WbOCyJbyrk6JfWKwlFn6A==" dependencies: "@protobufjs/aspromise" "^1.1.2" "@protobufjs/base64" "^1.1.2" @@ -4062,7 +4138,7 @@ protobufjs@^7.2.4: proxy-addr@~2.0.7: version "2.0.7" - resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025" + resolved "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz" integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg== dependencies: forwarded "0.2.0" @@ -4070,7 +4146,7 @@ proxy-addr@~2.0.7: proxyquire@^1.8.0: version "1.8.0" - resolved "https://registry.yarnpkg.com/proxyquire/-/proxyquire-1.8.0.tgz#02d514a5bed986f04cbb2093af16741535f79edc" + resolved "https://registry.npmjs.org/proxyquire/-/proxyquire-1.8.0.tgz" integrity sha512-mZZq4F50qaBkngvlf9paNfaSb5gtJ0mFPnBjda4NxCpXpMAaVfSLguRr9y2KXF6koOSBf4AanD2inuEQw3aCcA== dependencies: fill-keys "^1.0.2" @@ -4079,56 +4155,56 @@ proxyquire@^1.8.0: punycode@1.3.2: version "1.3.2" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d" - integrity sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw== + resolved "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz" + integrity "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0= sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw==" punycode@^2.0.0: version "2.3.0" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.3.0.tgz#f67fa67c94da8f4d0cfff981aee4118064199b8f" + resolved "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz" integrity sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA== punycode@^2.1.0: version "2.1.1" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" + resolved "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== qs@6.11.0: version "6.11.0" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a" - integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q== + resolved "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz" + integrity "sha1-/Q2WNEb3pl4TZ+AavYVClFPww3o= sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==" dependencies: side-channel "^1.0.4" querystring@0.2.0: version "0.2.0" - resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" - integrity sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g== + resolved "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz" + integrity "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA= sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==" queue-microtask@^1.2.2: version "1.2.3" - resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" + resolved 
"https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz" integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== rambda@^7.1.0: version "7.4.0" - resolved "https://registry.yarnpkg.com/rambda/-/rambda-7.4.0.tgz#61ec9de31d3dd6affe804de3bae04a5b818781e5" + resolved "https://registry.npmjs.org/rambda/-/rambda-7.4.0.tgz" integrity sha512-A9hihu7dUTLOUCM+I8E61V4kRXnN4DwYeK0DwCBydC1MqNI1PidyAtbtpsJlBBzK4icSctEcCQ1bGcLpBuETUQ== randombytes@^2.1.0: version "2.1.0" - resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" + resolved "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz" integrity sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ== dependencies: safe-buffer "^5.1.0" range-parser@~1.2.1: version "1.2.1" - resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" + resolved "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz" integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== raw-body@2.5.1: version "2.5.1" - resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857" + resolved "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz" integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig== dependencies: bytes "3.1.2" @@ -4138,8 +4214,8 @@ raw-body@2.5.1: raw-body@2.5.2: version "2.5.2" - resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.2.tgz#99febd83b90e08975087e8f1f9419a149366b68a" - integrity sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA== + resolved "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz" + integrity "sha1-mf69g7kOCJdQh+jx+UGaFJNmtoo= sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==" dependencies: bytes "3.1.2" http-errors "2.0.0" @@ -4147,16 +4223,16 @@ raw-body@2.5.2: unpipe "1.0.0" react-devtools-core@^4.19.1: - version "4.27.5" - resolved "https://registry.yarnpkg.com/react-devtools-core/-/react-devtools-core-4.27.5.tgz#35e41c09e7662ea29948d3caaeeea82f068cbbac" - integrity sha512-QJTriF1V4oyIenViCvM6qQuvcevQsp0sbKkHBZIQOij+AwY9DdOBY+dOeuymUqO5zV61CbmGxWsAIjeWlFS++w== + version "4.28.5" + resolved "https://registry.yarnpkg.com/react-devtools-core/-/react-devtools-core-4.28.5.tgz#c8442b91f068cdf0c899c543907f7f27d79c2508" + integrity "sha1-yEQrkfBozfDImcVDkH9/J9ecJQg= sha512-cq/o30z9W2Wb4rzBefjv5fBalHU0rJGZCHAkf/RHSBWSSYwh8PlQTqqOJmgIIbBtpj27T6FIPXeomIjZtCNVqA==" dependencies: shell-quote "^1.6.1" ws "^7" react-reconciler@^0.26.2: version "0.26.2" - resolved "https://registry.yarnpkg.com/react-reconciler/-/react-reconciler-0.26.2.tgz#bbad0e2d1309423f76cf3c3309ac6c96e05e9d91" + resolved "https://registry.npmjs.org/react-reconciler/-/react-reconciler-0.26.2.tgz" integrity sha512-nK6kgY28HwrMNwDnMui3dvm3rCFjZrcGiuwLc5COUipBK5hWHLOxMJhSnSomirqWwjPBJKV1QcbkI0VJr7Gl1Q== dependencies: loose-envify "^1.1.0" @@ -4165,7 +4241,7 @@ react-reconciler@^0.26.2: react@^17.0.2: version "17.0.2" - resolved "https://registry.yarnpkg.com/react/-/react-17.0.2.tgz#d0b5cc516d29eb3eee383f75b62864cfb6800037" + resolved "https://registry.npmjs.org/react/-/react-17.0.2.tgz" integrity 
sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA== dependencies: loose-envify "^1.1.0" @@ -4173,7 +4249,7 @@ react@^17.0.2: readable-stream@^2.2.2, readable-stream@~2.3.6: version "2.3.7" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" + resolved "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz" integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== dependencies: core-util-is "~1.0.0" @@ -4186,35 +4262,35 @@ readable-stream@^2.2.2, readable-stream@~2.3.6: readdirp@~3.5.0: version "3.5.0" - resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.5.0.tgz#9ba74c019b15d365278d2e91bb8c48d7b4d42c9e" + resolved "https://registry.npmjs.org/readdirp/-/readdirp-3.5.0.tgz" integrity sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ== dependencies: picomatch "^2.2.1" readdirp@~3.6.0: version "3.6.0" - resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7" + resolved "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz" integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA== dependencies: picomatch "^2.2.1" rechoir@^0.8.0: version "0.8.0" - resolved "https://registry.yarnpkg.com/rechoir/-/rechoir-0.8.0.tgz#49f866e0d32146142da3ad8f0eff352b3215ff22" + resolved "https://registry.npmjs.org/rechoir/-/rechoir-0.8.0.tgz" integrity sha512-/vxpCXddiX8NGfGO/mTafwjq4aFa/71pvamip0++IQk3zG8cbCj0fifNPrjjF1XMXUne91jL9OoxmdykoEtifQ== dependencies: resolve "^1.20.0" redeyed@~2.1.0: version "2.1.1" - resolved "https://registry.yarnpkg.com/redeyed/-/redeyed-2.1.1.tgz#8984b5815d99cb220469c99eeeffe38913e6cc0b" + resolved "https://registry.npmjs.org/redeyed/-/redeyed-2.1.1.tgz" integrity sha512-FNpGGo1DycYAdnrKFxCMmKYgo/mILAqtRYbkdQD8Ep/Hk2PQ5+aEAEx+IU713RTDmuBaH0c8P5ZozurNu5ObRQ== dependencies: esprima "~4.0.0" regexp.prototype.flags@^1.4.3: version "1.4.3" - resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz#87cab30f80f66660181a3bb7bf5981a872b367ac" + resolved "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz" integrity sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA== dependencies: call-bind "^1.0.2" @@ -4223,8 +4299,8 @@ regexp.prototype.flags@^1.4.3: regexp.prototype.flags@^1.5.0: version "1.5.0" - resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.5.0.tgz#fe7ce25e7e4cca8db37b6634c8a2c7009199b9cb" - integrity sha512-0SutC3pNudRKgquxGoRGIz946MZVHqbNfPjBdxeOhBrdgDKlRoXmYLQN9xRbrR09ZXWeGAdPuif7egofn6v5LA== + resolved "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.0.tgz" + integrity "sha1-/nziXn5Myo2ze2Y0yKLHAJGZucs= sha512-0SutC3pNudRKgquxGoRGIz946MZVHqbNfPjBdxeOhBrdgDKlRoXmYLQN9xRbrR09ZXWeGAdPuif7egofn6v5LA==" dependencies: call-bind "^1.0.2" define-properties "^1.2.0" @@ -4232,49 +4308,49 @@ regexp.prototype.flags@^1.5.0: regexpp@^3.0.0, regexpp@^3.2.0: version "3.2.0" - resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-3.2.0.tgz#0425a2768d8f23bad70ca4b90461fa2f1213e1b2" + resolved "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz" integrity sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg== reinterval@^1.1.0: 
version "1.1.0" - resolved "https://registry.yarnpkg.com/reinterval/-/reinterval-1.1.0.tgz#3361ecfa3ca6c18283380dd0bb9546f390f5ece7" + resolved "https://registry.npmjs.org/reinterval/-/reinterval-1.1.0.tgz" integrity sha512-QIRet3SYrGp0HUHO88jVskiG6seqUGC5iAG7AwI/BV4ypGcuqk9Du6YQBUOUqm9c8pw1eyLoIaONifRua1lsEQ== release-zalgo@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/release-zalgo/-/release-zalgo-1.0.0.tgz#09700b7e5074329739330e535c5a90fb67851730" + resolved "https://registry.npmjs.org/release-zalgo/-/release-zalgo-1.0.0.tgz" integrity sha512-gUAyHVHPPC5wdqX/LG4LWtRYtgjxyX78oanFNTMMyFEfOqdC54s3eE82imuWKbOeqYht2CrNf64Qb8vgmmtZGA== dependencies: es6-error "^4.0.1" require-directory@^2.1.1: version "2.1.1" - resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" + resolved "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz" integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== require-main-filename@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" + resolved "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz" integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== resolve-from@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748" + resolved "https://registry.npmjs.org/resolve-from/-/resolve-from-3.0.0.tgz" integrity sha512-GnlH6vxLymXJNMBo7XP1fJIzBFbdYt49CuTwmB/6N53t+kMPRMFKz783LlQ4tv28XoQfMWinAJX6WCGf2IlaIw== resolve-from@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" + resolved "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz" integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== resolve-from@^5.0.0: version "5.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" + resolved "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz" integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== resolve@^1.20.0, resolve@^1.22.0: version "1.22.1" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177" + resolved "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz" integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw== dependencies: is-core-module "^2.9.0" @@ -4283,7 +4359,7 @@ resolve@^1.20.0, resolve@^1.22.0: resolve@^1.22.1, resolve@^1.3.3: version "1.22.2" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.2.tgz#0ed0943d4e301867955766c9f3e1ae6d01c6845f" + resolved "https://registry.npmjs.org/resolve/-/resolve-1.22.2.tgz" integrity sha512-Sb+mjNHOULsBv818T40qSPeRiuWLyaGMa5ewydRLFimneixmVy2zdivRl+AF6jaYPC8ERxGDmFSiqui6SfPd+g== dependencies: is-core-module "^2.11.0" @@ -4292,8 +4368,8 @@ resolve@^1.22.1, resolve@^1.3.3: resolve@^2.0.0-next.4: version "2.0.0-next.4" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-2.0.0-next.4.tgz#3d37a113d6429f496ec4752d2a2e58efb1fd4660" - integrity 
sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ== + resolved "https://registry.npmjs.org/resolve/-/resolve-2.0.0-next.4.tgz" + integrity "sha1-PTehE9ZCn0luxHUtKi5Y77H9RmA= sha512-iMDbmAWtfU+MHpxt/I5iWI7cY6YVEZUQ3MBgPQ++XD1PELuJHIl82xBmObyP2KyQmkNB2dsqF7seoQQiAn5yDQ==" dependencies: is-core-module "^2.9.0" path-parse "^1.0.7" @@ -4301,12 +4377,12 @@ resolve@^2.0.0-next.4: resolve@~1.1.7: version "1.1.7" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b" + resolved "https://registry.npmjs.org/resolve/-/resolve-1.1.7.tgz" integrity sha512-9znBF0vBcaSN3W2j7wKvdERPwqTxSpCq+if5C0WoTCyV9n24rua28jeuQ2pL/HOf+yUe/Mef+H/5p60K0Id3bg== restore-cursor@^3.1.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-3.1.0.tgz#39f67c54b3a7a58cea5236d95cf0034239631f7e" + resolved "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz" integrity sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA== dependencies: onetime "^5.1.0" @@ -4314,44 +4390,44 @@ restore-cursor@^3.1.0: resumer@^0.0.0: version "0.0.0" - resolved "https://registry.yarnpkg.com/resumer/-/resumer-0.0.0.tgz#f1e8f461e4064ba39e82af3cdc2a8c893d076759" + resolved "https://registry.npmjs.org/resumer/-/resumer-0.0.0.tgz" integrity sha512-Fn9X8rX8yYF4m81rZCK/5VmrmsSbqS/i3rDLl6ZZHAXgC2nTAx3dhwG8q8odP/RmdLa2YrybDJaAMg+X1ajY3w== dependencies: through "~2.3.4" retimer@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/retimer/-/retimer-2.0.0.tgz#e8bd68c5e5a8ec2f49ccb5c636db84c04063bbca" + resolved "https://registry.npmjs.org/retimer/-/retimer-2.0.0.tgz" integrity sha512-KLXY85WkEq2V2bKex/LOO1ViXVn2KGYe4PYysAdYdjmraYIUsVkXu8O4am+8+5UbaaGl1qho4aqAAPHNQ4GSbg== retry@^0.13.1: version "0.13.1" - resolved "https://registry.yarnpkg.com/retry/-/retry-0.13.1.tgz#185b1587acf67919d63b357349e03537b2484658" - integrity sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg== + resolved "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz" + integrity "sha1-GFsVh6z2eRnWOzVzSeA1N7JIRlg= sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==" reusify@^1.0.4: version "1.0.4" - resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" + resolved "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz" integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== rimraf@^3.0.0, rimraf@^3.0.2: version "3.0.2" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + resolved "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz" integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== dependencies: glob "^7.1.3" run-parallel@^1.1.9: version "1.2.0" - resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" + resolved "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz" integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== dependencies: queue-microtask "^1.2.2" safe-array-concat@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/safe-array-concat/-/safe-array-concat-1.0.0.tgz#2064223cba3c08d2ee05148eedbc563cd6d84060" - integrity 
sha512-9dVEFruWIsnie89yym+xWTAYASdpw3CJV7Li/6zBewGf9z2i1j31rP6jnY0pHEO4QZh6N0K11bFjWmdR8UGdPQ== + resolved "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.0.0.tgz" + integrity "sha1-IGQiPLo8CNLuBRSO7bxWPNbYQGA= sha512-9dVEFruWIsnie89yym+xWTAYASdpw3CJV7Li/6zBewGf9z2i1j31rP6jnY0pHEO4QZh6N0K11bFjWmdR8UGdPQ==" dependencies: call-bind "^1.0.2" get-intrinsic "^1.2.0" @@ -4360,17 +4436,17 @@ safe-array-concat@^1.0.0: safe-buffer@5.2.1, safe-buffer@^5.1.0: version "5.2.1" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" + resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" + resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== safe-regex-test@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/safe-regex-test/-/safe-regex-test-1.0.0.tgz#793b874d524eb3640d1873aad03596db2d4f2295" + resolved "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.0.tgz" integrity sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA== dependencies: call-bind "^1.0.2" @@ -4379,22 +4455,22 @@ safe-regex-test@^1.0.0: "safer-buffer@>= 2.1.2 < 3": version "2.1.2" - resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" + resolved "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== sax@1.2.1: version "1.2.1" - resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.1.tgz#7b8e656190b228e81a66aea748480d828cd2d37a" - integrity sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA== + resolved "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz" + integrity "sha1-e45lYZCyKOgaZq6nSEgNgozS03o= sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA==" sax@>=0.6.0: version "1.3.0" - resolved "https://registry.yarnpkg.com/sax/-/sax-1.3.0.tgz#a5dbe77db3be05c9d1ee7785dbd3ea9de51593d0" - integrity sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA== + resolved "https://registry.npmjs.org/sax/-/sax-1.3.0.tgz" + integrity "sha1-pdvnfbO+BcnR7neF29PqneUVk9A= sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA==" scheduler@^0.20.2: version "0.20.2" - resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.20.2.tgz#4baee39436e34aa93b4874bddcbf0fe8b8b50e91" + resolved "https://registry.npmjs.org/scheduler/-/scheduler-0.20.2.tgz" integrity sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ== dependencies: loose-envify "^1.1.0" @@ -4402,38 +4478,38 @@ scheduler@^0.20.2: semver@5.3.0: version "5.3.0" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.3.0.tgz#9b2ce5d3de02d17c6012ad326aa6b4d0cf54f94f" + resolved "https://registry.npmjs.org/semver/-/semver-5.3.0.tgz" integrity sha512-mfmm3/H9+67MCVix1h+IXTpDwL6710LyHuk7+cWC9T1mE0qz4iHhh6r4hU2wrIT9iTsAAC2XQRvfblL028cpLw== 
-semver@^6.0.0, semver@^6.3.0: - version "6.3.0" - resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" - integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== +semver@^6.0.0, semver@^6.3.0, semver@^6.3.1: + version "6.3.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" + integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== semver@^7.0.0: version "7.4.0" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.4.0.tgz#8481c92feffc531ab1e012a8ffc15bdd3a0f4318" - integrity sha512-RgOxM8Mw+7Zus0+zcLEUn8+JfoLpj/huFTItQy2hsM4khuC1HYRDp0cU482Ewn/Fcy6bCjufD8vAj7voC66KQw== + resolved "https://registry.npmjs.org/semver/-/semver-7.4.0.tgz" + integrity "sha1-hIHJL+/8Uxqx4BKo/8Fb3ToPQxg= sha512-RgOxM8Mw+7Zus0+zcLEUn8+JfoLpj/huFTItQy2hsM4khuC1HYRDp0cU482Ewn/Fcy6bCjufD8vAj7voC66KQw==" dependencies: lru-cache "^6.0.0" semver@^7.3.8: version "7.5.3" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.3.tgz#161ce8c2c6b4b3bdca6caadc9fa3317a4c4fe88e" - integrity sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ== + resolved "https://registry.npmjs.org/semver/-/semver-7.5.3.tgz" + integrity "sha1-Fhzowsa0s73KbKrcn6MxekxP6I4= sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ==" dependencies: lru-cache "^6.0.0" semver@^7.5.4: version "7.5.4" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e" - integrity sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA== + resolved "https://registry.npmjs.org/semver/-/semver-7.5.4.tgz" + integrity "sha1-SDmG7E7TjhxsSMNIlKkYLb/2im4= sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==" dependencies: lru-cache "^6.0.0" send@0.18.0: version "0.18.0" - resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be" + resolved "https://registry.npmjs.org/send/-/send-0.18.0.tgz" integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== dependencies: debug "2.6.9" @@ -4452,14 +4528,14 @@ send@0.18.0: serialize-javascript@5.0.1: version "5.0.1" - resolved "https://registry.yarnpkg.com/serialize-javascript/-/serialize-javascript-5.0.1.tgz#7886ec848049a462467a97d3d918ebb2aaf934f4" + resolved "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-5.0.1.tgz" integrity sha512-SaaNal9imEO737H2c05Og0/8LUXG7EnsZyMa8MzkmuHoELfT6txuj0cMqRj6zfPKnmQ1yasR4PCJc8x+M4JSPA== dependencies: randombytes "^2.1.0" serve-static@1.15.0: version "1.15.0" - resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540" + resolved "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz" integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== dependencies: encodeurl "~1.0.2" @@ -4469,13 +4545,13 @@ serve-static@1.15.0: set-blocking@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" + resolved "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz" integrity 
sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw== set-function-length@^1.1.1: version "1.1.1" - resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.1.1.tgz#4bc39fafb0307224a33e106a7d35ca1218d659ed" - integrity sha512-VoaqjbBJKiWtg4yRcKBQ7g7wnGnLV3M8oLvVWwOk2PdYY6PEFegR1vezXR0tw6fZGF9csVakIRjrJiy2veSBFQ== + resolved "https://registry.npmjs.org/set-function-length/-/set-function-length-1.1.1.tgz" + integrity "sha1-S8Ofr7AwciSjPhBqfTXKEhjWWe0= sha512-VoaqjbBJKiWtg4yRcKBQ7g7wnGnLV3M8oLvVWwOk2PdYY6PEFegR1vezXR0tw6fZGF9csVakIRjrJiy2veSBFQ==" dependencies: define-data-property "^1.1.1" get-intrinsic "^1.2.1" @@ -4484,34 +4560,34 @@ set-function-length@^1.1.1: setimmediate@^1.0.5: version "1.0.5" - resolved "https://registry.yarnpkg.com/setimmediate/-/setimmediate-1.0.5.tgz#290cbb232e306942d7d7ea9b83732ab7856f8285" + resolved "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz" integrity sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA== setprototypeof@1.2.0: version "1.2.0" - resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424" + resolved "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz" integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw== shebang-command@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + resolved "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz" integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== dependencies: shebang-regex "^3.0.0" shebang-regex@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + resolved "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz" integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== shell-quote@^1.6.1: version "1.8.1" - resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.8.1.tgz#6dbf4db75515ad5bac63b4f1894c3a154c766680" - integrity sha512-6j1W9l1iAs/4xYBI1SYOVZyFcCis9b4KCLQ8fgAGG07QvzaRLVVRQvAy85yNmmZSjYjg4MWh4gNvlPujU/5LpA== + resolved "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.1.tgz" + integrity "sha1-bb9Nt1UVrVusY7TxiUw6FUx2ZoA= sha512-6j1W9l1iAs/4xYBI1SYOVZyFcCis9b4KCLQ8fgAGG07QvzaRLVVRQvAy85yNmmZSjYjg4MWh4gNvlPujU/5LpA==" side-channel@^1.0.4: version "1.0.4" - resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" + resolved "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz" integrity sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw== dependencies: call-bind "^1.0.0" @@ -4520,18 +4596,18 @@ side-channel@^1.0.4: signal-exit@^3.0.2, signal-exit@^3.0.4, signal-exit@^3.0.6: version "3.0.7" - resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" + resolved "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz" integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== sinon-chai@^3.7.0: version "3.7.0" - resolved 
"https://registry.yarnpkg.com/sinon-chai/-/sinon-chai-3.7.0.tgz#cfb7dec1c50990ed18c153f1840721cf13139783" + resolved "https://registry.npmjs.org/sinon-chai/-/sinon-chai-3.7.0.tgz" integrity sha512-mf5NURdUaSdnatJx3uhoBOrY9dtL19fiOtAdT1Azxg3+lNJFiuN0uzaU3xX1LeAfL17kHQhTAJgpsfhbMJMY2g== sinon@^15.2.0: version "15.2.0" - resolved "https://registry.yarnpkg.com/sinon/-/sinon-15.2.0.tgz#5e44d4bc5a9b5d993871137fd3560bebfac27565" - integrity sha512-nPS85arNqwBXaIsFCkolHjGIkFo+Oxu9vbgmBJizLAhqe6P2o3Qmj3KCUoRkfhHtvgDhZdWD3risLHAUJ8npjw== + resolved "https://registry.npmjs.org/sinon/-/sinon-15.2.0.tgz" + integrity "sha1-XkTUvFqbXZk4cRN/01YL6/rCdWU= sha512-nPS85arNqwBXaIsFCkolHjGIkFo+Oxu9vbgmBJizLAhqe6P2o3Qmj3KCUoRkfhHtvgDhZdWD3risLHAUJ8npjw==" dependencies: "@sinonjs/commons" "^3.0.0" "@sinonjs/fake-timers" "^10.3.0" @@ -4542,12 +4618,12 @@ sinon@^15.2.0: slash@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" + resolved "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz" integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== slice-ansi@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-3.0.0.tgz#31ddc10930a1b7e0b67b08c96c2f49b77a789787" + resolved "https://registry.npmjs.org/slice-ansi/-/slice-ansi-3.0.0.tgz" integrity sha512-pSyv7bSTC7ig9Dcgbw9AuRNUb5k5V6oDudjZoMBSr13qpLBG7tB+zgCkARjq7xIUgdz5P1Qe8u+rSGdouOOIyQ== dependencies: ansi-styles "^4.0.0" @@ -4556,7 +4632,7 @@ slice-ansi@^3.0.0: source-map-support@^0.5.16: version "0.5.21" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f" + resolved "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz" integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w== dependencies: buffer-from "^1.0.0" @@ -4564,17 +4640,17 @@ source-map-support@^0.5.16: source-map@^0.6.0, source-map@^0.6.1: version "0.6.1" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" + resolved "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== source-map@^0.7.4: version "0.7.4" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.4.tgz#a9bbe705c9d8846f4e08ff6765acf0f1b0898656" + resolved "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz" integrity sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA== spawn-wrap@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/spawn-wrap/-/spawn-wrap-2.0.0.tgz#103685b8b8f9b79771318827aa78650a610d457e" + resolved "https://registry.npmjs.org/spawn-wrap/-/spawn-wrap-2.0.0.tgz" integrity sha512-EeajNjfN9zMnULLwhZZQU3GWBoFNkbngTUPfaawT4RkMiviTxcX0qfhVbGey39mfctfDHkWtuecgQ8NJcyQWHg== dependencies: foreground-child "^2.0.0" @@ -4586,36 +4662,36 @@ spawn-wrap@^2.0.0: sprintf-js@~1.0.2: version "1.0.3" - resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" + resolved "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz" integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== stack-utils@^2.0.2, stack-utils@^2.0.4: version "2.0.6" - resolved 
"https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.6.tgz#aaf0748169c02fc33c8232abccf933f54a1cc34f" + resolved "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz" integrity sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ== dependencies: escape-string-regexp "^2.0.0" statuses@2.0.1: version "2.0.1" - resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" + resolved "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz" integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== stop-iteration-iterator@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/stop-iteration-iterator/-/stop-iteration-iterator-1.0.0.tgz#6a60be0b4ee757d1ed5254858ec66b10c49285e4" - integrity sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ== + resolved "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.0.0.tgz" + integrity "sha1-amC+C07nV9HtUlSFjsZrEMSSheQ= sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ==" dependencies: internal-slot "^1.0.4" streamsearch@^1.1.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-1.1.0.tgz#404dd1e2247ca94af554e841a8ef0eaa238da764" + resolved "https://registry.npmjs.org/streamsearch/-/streamsearch-1.1.0.tgz" integrity sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg== "string-width@^1.0.2 || 2", string-width@^2.1.1: version "2.1.1" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" + resolved "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz" integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== dependencies: is-fullwidth-code-point "^2.0.0" @@ -4623,7 +4699,7 @@ streamsearch@^1.1.0: string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2: version "4.2.3" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== dependencies: emoji-regex "^8.0.0" @@ -4632,8 +4708,8 @@ string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2 string.prototype.trim@^1.2.7: version "1.2.7" - resolved "https://registry.yarnpkg.com/string.prototype.trim/-/string.prototype.trim-1.2.7.tgz#a68352740859f6893f14ce3ef1bb3037f7a90533" - integrity sha512-p6TmeT1T3411M8Cgg9wBTMRtY2q9+PNy9EV1i2lIXUN/btt763oIfxwN3RR8VU6wHX8j/1CFy0L+YuThm6bgOg== + resolved "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.7.tgz" + integrity "sha1-poNSdAhZ9ok/FM4+8bswN/epBTM= sha512-p6TmeT1T3411M8Cgg9wBTMRtY2q9+PNy9EV1i2lIXUN/btt763oIfxwN3RR8VU6wHX8j/1CFy0L+YuThm6bgOg==" dependencies: call-bind "^1.0.2" define-properties "^1.1.4" @@ -4641,7 +4717,7 @@ string.prototype.trim@^1.2.7: string.prototype.trimend@^1.0.5: version "1.0.5" - resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz#914a65baaab25fbdd4ee291ca7dde57e869cb8d0" + resolved "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz" integrity 
sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog== dependencies: call-bind "^1.0.2" @@ -4650,8 +4726,8 @@ string.prototype.trimend@^1.0.5: string.prototype.trimend@^1.0.6: version "1.0.6" - resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.6.tgz#c4a27fa026d979d79c04f17397f250a462944533" - integrity sha512-JySq+4mrPf9EsDBEDYMOb/lM7XQLulwg5R/m1r0PXEFqrV0qHvl58sdTilSXtKOflCsK2E8jxf+GKC0T07RWwQ== + resolved "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.6.tgz" + integrity "sha1-xKJ/oCbZedecBPFzl/JQpGKURTM= sha512-JySq+4mrPf9EsDBEDYMOb/lM7XQLulwg5R/m1r0PXEFqrV0qHvl58sdTilSXtKOflCsK2E8jxf+GKC0T07RWwQ==" dependencies: call-bind "^1.0.2" define-properties "^1.1.4" @@ -4659,7 +4735,7 @@ string.prototype.trimend@^1.0.6: string.prototype.trimstart@^1.0.5: version "1.0.5" - resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz#5466d93ba58cfa2134839f81d7f42437e8c01fef" + resolved "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz" integrity sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg== dependencies: call-bind "^1.0.2" @@ -4668,8 +4744,8 @@ string.prototype.trimstart@^1.0.5: string.prototype.trimstart@^1.0.6: version "1.0.6" - resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.6.tgz#e90ab66aa8e4007d92ef591bbf3cd422c56bdcf4" - integrity sha512-omqjMDaY92pbn5HOX7f9IccLA+U1tA9GvtU4JrodiXFfYB7jPzzHpRzpglLAjtUV6bB557zwClJezTqnAiYnQA== + resolved "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.6.tgz" + integrity "sha1-6Qq2aqjkAH2S71kbvzzUIsVr3PQ= sha512-omqjMDaY92pbn5HOX7f9IccLA+U1tA9GvtU4JrodiXFfYB7jPzzHpRzpglLAjtUV6bB557zwClJezTqnAiYnQA==" dependencies: call-bind "^1.0.2" define-properties "^1.1.4" @@ -4677,69 +4753,69 @@ string.prototype.trimstart@^1.0.6: string_decoder@~1.1.1: version "1.1.1" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" + resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== dependencies: safe-buffer "~5.1.0" strip-ansi@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" + resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz" integrity sha512-4XaJ2zQdCzROZDivEVIDPkcQn8LMFSa8kj8Gxb/Lnwzv9A8VctNZ+lfivC/sV3ivW8ElJTERXZoPBRrZKkNKow== dependencies: ansi-regex "^3.0.0" strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz" integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== dependencies: ansi-regex "^5.0.1" strip-bom@^3.0.0: version "3.0.0" - resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3" + resolved "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz" integrity sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA== strip-bom@^4.0.0: version "4.0.0" - resolved 
"https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" + resolved "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz" integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== strip-json-comments@3.1.1, strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: version "3.1.1" - resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" + resolved "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz" integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== supports-color@8.1.1: version "8.1.1" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-8.1.1.tgz#cd6fc17e28500cff56c1b86c0a7fd4a54a73005c" + resolved "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz" integrity sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q== dependencies: has-flag "^4.0.0" supports-color@^5.3.0: version "5.5.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" + resolved "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz" integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== dependencies: has-flag "^3.0.0" supports-color@^7.1.0, supports-color@^7.2.0: version "7.2.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" + resolved "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz" integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== dependencies: has-flag "^4.0.0" supports-preserve-symlinks-flag@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" + resolved "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz" integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== tap-mocha-reporter@^5.0.3: version "5.0.3" - resolved "https://registry.yarnpkg.com/tap-mocha-reporter/-/tap-mocha-reporter-5.0.3.tgz#3e261b2a43092ba8bc0cb67a89b33e283decee05" + resolved "https://registry.npmjs.org/tap-mocha-reporter/-/tap-mocha-reporter-5.0.3.tgz" integrity sha512-6zlGkaV4J+XMRFkN0X+yuw6xHbE9jyCZ3WUKfw4KxMyRGOpYSRuuQTRJyWX88WWuLdVTuFbxzwXhXuS2XE6o0g== dependencies: color-support "^1.1.0" @@ -4753,7 +4829,7 @@ tap-mocha-reporter@^5.0.3: tap-parser@^11.0.0, tap-parser@^11.0.2: version "11.0.2" - resolved "https://registry.yarnpkg.com/tap-parser/-/tap-parser-11.0.2.tgz#5d3e76e2cc521e23a8c50201487b273ca0fba800" + resolved "https://registry.npmjs.org/tap-parser/-/tap-parser-11.0.2.tgz" integrity sha512-6qGlC956rcORw+fg7Fv1iCRAY8/bU9UabUAhs3mXRH6eRmVZcNPLheSXCYaVaYeSwx5xa/1HXZb1537YSvwDZg== dependencies: events-to-array "^1.0.1" @@ -4762,15 +4838,15 @@ tap-parser@^11.0.0, tap-parser@^11.0.2: tap-yaml@^1.0.0, tap-yaml@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/tap-yaml/-/tap-yaml-1.0.2.tgz#62032a459e5524e10661c19ee9df5d33d78812fa" + resolved "https://registry.npmjs.org/tap-yaml/-/tap-yaml-1.0.2.tgz" integrity 
sha512-GegASpuqBnRNdT1U+yuUPZ8rEU64pL35WPBpCISWwff4dErS2/438barz7WFJl4Nzh3Y05tfPidZnH+GaV1wMg== dependencies: yaml "^1.10.2" tap@^16.3.7: version "16.3.7" - resolved "https://registry.yarnpkg.com/tap/-/tap-16.3.7.tgz#1d3561b58dd7af3aed172a2f6fc3ad8252b040ab" - integrity sha512-AaovVsfXVKcIf9eD1NxgwIqSDz5LauvybTpS6bjAKVYqz3+iavHC1abwxTkXmswb2n7eq8qKLt8DvY3D6iWcYA== + resolved "https://registry.npmjs.org/tap/-/tap-16.3.7.tgz" + integrity "sha1-HTVhtY3XrzrtFyovb8OtglKwQKs= sha512-AaovVsfXVKcIf9eD1NxgwIqSDz5LauvybTpS6bjAKVYqz3+iavHC1abwxTkXmswb2n7eq8qKLt8DvY3D6iWcYA==" dependencies: "@isaacs/import-jsx" "^4.0.1" "@types/react" "^17.0.52" @@ -4801,8 +4877,8 @@ tap@^16.3.7: tape@^5.6.5: version "5.6.5" - resolved "https://registry.yarnpkg.com/tape/-/tape-5.6.5.tgz#a4dd5c6fb035fcee5b89a069cf8e98c6cbf40959" - integrity sha512-r6XcLeO3h5rOFpkYWifAjlhSSSXbFSSBF86lhb6J0KAQbY91H1MzOeIWG6TH0iWS52ypwr6fenJgCGQGtL8CxA== + resolved "https://registry.npmjs.org/tape/-/tape-5.6.5.tgz" + integrity "sha1-pN1cb7A1/O5biaBpz46Yxsv0CVk= sha512-r6XcLeO3h5rOFpkYWifAjlhSSSXbFSSBF86lhb6J0KAQbY91H1MzOeIWG6TH0iWS52ypwr6fenJgCGQGtL8CxA==" dependencies: array.prototype.every "^1.1.4" call-bind "^1.0.2" @@ -4828,19 +4904,19 @@ tape@^5.6.5: tarn@^3.0.2: version "3.0.2" - resolved "https://registry.yarnpkg.com/tarn/-/tarn-3.0.2.tgz#73b6140fbb881b71559c4f8bfde3d9a4b3d27693" + resolved "https://registry.npmjs.org/tarn/-/tarn-3.0.2.tgz" integrity sha512-51LAVKUSZSVfI05vjPESNc5vwqqZpbXCsU+/+wxlOrUjk2SnFTt97v9ZgQrD4YmxYW1Px6w2KjaDitCfkvgxMQ== tcompare@^5.0.6, tcompare@^5.0.7: version "5.0.7" - resolved "https://registry.yarnpkg.com/tcompare/-/tcompare-5.0.7.tgz#8c2d647208031ed5cac5e573428149e16f795bbf" + resolved "https://registry.npmjs.org/tcompare/-/tcompare-5.0.7.tgz" integrity sha512-d9iddt6YYGgyxJw5bjsN7UJUO1kGOtjSlNy/4PoGYAjQS5pAT/hzIoLf1bZCw+uUxRmZJh7Yy1aA7xKVRT9B4w== dependencies: diff "^4.0.2" test-exclude@^6.0.0: version "6.0.0" - resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" + resolved "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz" integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== dependencies: "@istanbuljs/schema" "^0.1.2" @@ -4849,44 +4925,44 @@ test-exclude@^6.0.0: text-table@^0.2.0: version "0.2.0" - resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + resolved "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz" integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== through@^2.3.8, through@~2.3.4: version "2.3.8" - resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" + resolved "https://registry.npmjs.org/through/-/through-2.3.8.tgz" integrity sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg== tildify@2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/tildify/-/tildify-2.0.0.tgz#f205f3674d677ce698b7067a99e949ce03b4754a" + resolved "https://registry.npmjs.org/tildify/-/tildify-2.0.0.tgz" integrity sha512-Cc+OraorugtXNfs50hU9KS369rFXCfgGLpfCfvlc+Ud5u6VWmUQsOAa9HbTvheQdYnrdJqqv1e5oIqXppMYnSw== timestring@^6.0.0: version "6.0.0" - resolved "https://registry.yarnpkg.com/timestring/-/timestring-6.0.0.tgz#b0c7c331981ecf2066ce88bcfb8ee3ae32e7a0f6" + resolved "https://registry.npmjs.org/timestring/-/timestring-6.0.0.tgz" integrity 
sha512-wMctrWD2HZZLuIlchlkE2dfXJh7J2KDI9Dwl+2abPYg0mswQHfOAyQW3jJg1pY5VfttSINZuKcXoB3FGypVklA== to-fast-properties@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" + resolved "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz" integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== to-regex-range@^5.0.1: version "5.0.1" - resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" + resolved "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz" integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== dependencies: is-number "^7.0.0" toidentifier@1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35" + resolved "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz" integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== treport@^3.0.4: version "3.0.4" - resolved "https://registry.yarnpkg.com/treport/-/treport-3.0.4.tgz#05247fa7820ad3afe92355e4cf08fe41a933084b" + resolved "https://registry.npmjs.org/treport/-/treport-3.0.4.tgz" integrity sha512-zUw1sfJypuoZi0I54woo6CNsfvMrv+OwLBD0/wc4LhMW8MA0MbSE+4fNObn22JSR8x9lOYccuAzfBfZ2IemzoQ== dependencies: "@isaacs/import-jsx" "^4.0.1" @@ -4900,12 +4976,12 @@ treport@^3.0.4: trivial-deferred@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/trivial-deferred/-/trivial-deferred-1.0.1.tgz#376d4d29d951d6368a6f7a0ae85c2f4d5e0658f3" - integrity sha512-dagAKX7vaesNNAwOc9Np9C2mJ+7YopF4lk+jE2JML9ta4kZ91Y6UruJNH65bLRYoUROD8EY+Pmi44qQWwXR7sw== + resolved "https://registry.npmjs.org/trivial-deferred/-/trivial-deferred-1.0.1.tgz" + integrity "sha1-N21NKdlR1jaKb3oK6FwvTV4GWPM= sha512-dagAKX7vaesNNAwOc9Np9C2mJ+7YopF4lk+jE2JML9ta4kZ91Y6UruJNH65bLRYoUROD8EY+Pmi44qQWwXR7sw==" tsconfig-paths@^3.14.1: version "3.14.1" - resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz#ba0734599e8ea36c862798e920bcf163277b137a" + resolved "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz" integrity sha512-fxDhWnFSLt3VuTwtvJt5fpwxBHg5AdKWMsgcPOOIilyjymcYVZoCQF8fvFRezCNfblEXmi+PcM1eYHeOAgXCOQ== dependencies: "@types/json5" "^0.0.29" @@ -4915,39 +4991,39 @@ tsconfig-paths@^3.14.1: type-check@^0.4.0, type-check@~0.4.0: version "0.4.0" - resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" + resolved "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz" integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== dependencies: prelude-ls "^1.2.1" type-detect@4.0.8, type-detect@^4.0.0, type-detect@^4.0.5, type-detect@^4.0.8: version "4.0.8" - resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" + resolved "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz" integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== type-fest@^0.12.0: version "0.12.0" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.12.0.tgz#f57a27ab81c68d136a51fd71467eff94157fa1ee" + resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.12.0.tgz" 
integrity sha512-53RyidyjvkGpnWPMF9bQgFtWp+Sl8O2Rp13VavmJgfAP9WWG6q6TkrKU8iyJdnwnfgHI6k2hTlgqH4aSdjoTbg== type-fest@^0.20.2: version "0.20.2" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" + resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz" integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== type-fest@^0.21.3: version "0.21.3" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" + resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz" integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== type-fest@^0.8.0: version "0.8.1" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d" + resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz" integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA== type-is@^1.6.4, type-is@~1.6.18: version "1.6.18" - resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" + resolved "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz" integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== dependencies: media-typer "0.3.0" @@ -4955,8 +5031,8 @@ type-is@^1.6.4, type-is@~1.6.18: typed-array-buffer@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/typed-array-buffer/-/typed-array-buffer-1.0.0.tgz#18de3e7ed7974b0a729d3feecb94338d1472cd60" - integrity sha512-Y8KTSIglk9OZEr8zywiIHG/kmQ7KWyjseXs1CbSo8vC42w7hg2HgYTxSWwP0+is7bWDc1H+Fo026CpHFwm8tkw== + resolved "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.0.tgz" + integrity "sha1-GN4+fteXSwpynT/uy5QzjRRyzWA= sha512-Y8KTSIglk9OZEr8zywiIHG/kmQ7KWyjseXs1CbSo8vC42w7hg2HgYTxSWwP0+is7bWDc1H+Fo026CpHFwm8tkw==" dependencies: call-bind "^1.0.2" get-intrinsic "^1.2.1" @@ -4964,8 +5040,8 @@ typed-array-buffer@^1.0.0: typed-array-byte-length@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/typed-array-byte-length/-/typed-array-byte-length-1.0.0.tgz#d787a24a995711611fb2b87a4052799517b230d0" - integrity sha512-Or/+kvLxNpeQ9DtSydonMxCx+9ZXOswtwJn17SNLvhptaXYDJvkFFP5zbfU/uLmvnBJlI4yrnXRxpdWH/M5tNA== + resolved "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.0.tgz" + integrity "sha1-14eiSplXEWEfsrh6QFJ5lReyMNA= sha512-Or/+kvLxNpeQ9DtSydonMxCx+9ZXOswtwJn17SNLvhptaXYDJvkFFP5zbfU/uLmvnBJlI4yrnXRxpdWH/M5tNA==" dependencies: call-bind "^1.0.2" for-each "^0.3.3" @@ -4974,8 +5050,8 @@ typed-array-byte-length@^1.0.0: typed-array-byte-offset@^1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/typed-array-byte-offset/-/typed-array-byte-offset-1.0.0.tgz#cbbe89b51fdef9cd6aaf07ad4707340abbc4ea0b" - integrity sha512-RD97prjEt9EL8YgAgpOkf3O4IF9lhJFr9g0htQkm0rchFp/Vx7LW5Q8fSXXub7BXAODyUQohRMyOc3faCPd0hg== + resolved "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.0.tgz" + integrity "sha1-y76JtR/e+c1qrwetRwc0CrvE6gs= sha512-RD97prjEt9EL8YgAgpOkf3O4IF9lhJFr9g0htQkm0rchFp/Vx7LW5Q8fSXXub7BXAODyUQohRMyOc3faCPd0hg==" dependencies: available-typed-arrays "^1.0.5" call-bind "^1.0.2" @@ -4985,8 +5061,8 @@ typed-array-byte-offset@^1.0.0: typed-array-length@^1.0.4: version "1.0.4" - resolved 
"https://registry.yarnpkg.com/typed-array-length/-/typed-array-length-1.0.4.tgz#89d83785e5c4098bec72e08b319651f0eac9c1bb" - integrity sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng== + resolved "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.4.tgz" + integrity "sha1-idg3heXECYvscuCLMZZR8OrJwbs= sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng==" dependencies: call-bind "^1.0.2" for-each "^0.3.3" @@ -4994,19 +5070,19 @@ typed-array-length@^1.0.4: typedarray-to-buffer@^3.1.5: version "3.1.5" - resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080" + resolved "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz" integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q== dependencies: is-typedarray "^1.0.0" typedarray@^0.0.6: version "0.0.6" - resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" + resolved "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz" integrity sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA== unbox-primitive@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e" + resolved "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz" integrity sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw== dependencies: call-bind "^1.0.2" @@ -5016,19 +5092,27 @@ unbox-primitive@^1.0.2: unicode-length@^2.0.2: version "2.1.0" - resolved "https://registry.yarnpkg.com/unicode-length/-/unicode-length-2.1.0.tgz#425202b99f21854f5ca3530cc2a08dc262ce619f" + resolved "https://registry.npmjs.org/unicode-length/-/unicode-length-2.1.0.tgz" integrity sha512-4bV582zTV9Q02RXBxSUMiuN/KHo5w4aTojuKTNT96DIKps/SIawFp7cS5Mu25VuY1AioGXrmYyzKZUzh8OqoUw== dependencies: punycode "^2.0.0" unpipe@1.0.0, unpipe@~1.0.0: version "1.0.0" - resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" + resolved "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz" integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== +update-browserslist-db@^1.0.13: + version "1.0.13" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.13.tgz#3c5e4f5c083661bd38ef64b6328c26ed6c8248c4" + integrity "sha1-PF5PXAg2Yb0472S2Mowm7WyCSMQ= sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg==" + dependencies: + escalade "^3.1.1" + picocolors "^1.0.0" + update-browserslist-db@^1.0.9: version "1.0.9" - resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.0.9.tgz#2924d3927367a38d5c555413a7ce138fc95fcb18" + resolved "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.9.tgz" integrity sha512-/xsqn21EGVdXI3EXSum1Yckj3ZVZugqyOZQ/CxYPBD/R+ko9NSUScf8tFF4dOKY+2pvSSJA/S+5B8s4Zr4kyvg== dependencies: escalade "^3.1.1" @@ -5036,28 +5120,28 @@ update-browserslist-db@^1.0.9: uri-js@^4.2.2: version "4.4.1" - resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e" + resolved "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz" 
integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg== dependencies: punycode "^2.1.0" url@0.10.3: version "0.10.3" - resolved "https://registry.yarnpkg.com/url/-/url-0.10.3.tgz#021e4d9c7705f21bbf37d03ceb58767402774c64" - integrity sha512-hzSUW2q06EqL1gKM/a+obYHLIO6ct2hwPuviqTTOcfFVc61UbfJ2Q32+uGL/HCPxKqrdGB5QUwIe7UqlDgwsOQ== + resolved "https://registry.npmjs.org/url/-/url-0.10.3.tgz" + integrity "sha1-Ah5NnHcF8hu/N9A861h2dAJ3TGQ= sha512-hzSUW2q06EqL1gKM/a+obYHLIO6ct2hwPuviqTTOcfFVc61UbfJ2Q32+uGL/HCPxKqrdGB5QUwIe7UqlDgwsOQ==" dependencies: punycode "1.3.2" querystring "0.2.0" util-deprecate@~1.0.1: version "1.0.2" - resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" + resolved "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== util@^0.12.4: version "0.12.5" - resolved "https://registry.yarnpkg.com/util/-/util-0.12.5.tgz#5f17a6059b73db61a875668781a1c2b136bd6fbc" - integrity sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA== + resolved "https://registry.npmjs.org/util/-/util-0.12.5.tgz" + integrity "sha1-XxemBZtz22GodWaHgaHCsTa9b7w= sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==" dependencies: inherits "^2.0.3" is-arguments "^1.0.4" @@ -5067,32 +5151,32 @@ util@^0.12.4: utils-merge@1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" + resolved "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz" integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== uuid-parse@^1.1.0: version "1.1.0" - resolved "https://registry.yarnpkg.com/uuid-parse/-/uuid-parse-1.1.0.tgz#7061c5a1384ae0e1f943c538094597e1b5f3a65b" + resolved "https://registry.npmjs.org/uuid-parse/-/uuid-parse-1.1.0.tgz" integrity sha512-OdmXxA8rDsQ7YpNVbKSJkNzTw2I+S5WsbMDnCtIWSQaosNAcWtFuI/YK1TjzUI6nbkgiqEyh8gWngfcv8Asd9A== uuid@8.0.0: version "8.0.0" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.0.0.tgz#bc6ccf91b5ff0ac07bbcdbf1c7c4e150db4dbb6c" - integrity sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw== + resolved "https://registry.npmjs.org/uuid/-/uuid-8.0.0.tgz" + integrity "sha1-vGzPkbX/CsB7vNvxx8ThUNtNu2w= sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw==" uuid@^8.3.2: version "8.3.2" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2" + resolved "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz" integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== vary@~1.1.2: version "1.1.2" - resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" + resolved "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz" integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== which-boxed-primitive@^1.0.2: version "1.0.2" - resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" + resolved "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz" integrity 
sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg== dependencies: is-bigint "^1.0.1" @@ -5103,8 +5187,8 @@ which-boxed-primitive@^1.0.2: which-collection@^1.0.1: version "1.0.1" - resolved "https://registry.yarnpkg.com/which-collection/-/which-collection-1.0.1.tgz#70eab71ebbbd2aefaf32f917082fc62cdcb70906" - integrity sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A== + resolved "https://registry.npmjs.org/which-collection/-/which-collection-1.0.1.tgz" + integrity "sha1-cOq3Hru9Ku+vMvkXCC/GLNy3CQY= sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A==" dependencies: is-map "^2.0.1" is-set "^2.0.1" @@ -5113,13 +5197,13 @@ which-collection@^1.0.1: which-module@^2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" + resolved "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz" integrity sha512-B+enWhmw6cjfVC7kS8Pj9pCrKSc5txArRyaYGe088shv/FGWH+0Rjx/xPgtsWfsUtS27FkP697E4DDhgrgoc0Q== which-typed-array@^1.1.10, which-typed-array@^1.1.9: version "1.1.10" - resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.10.tgz#74baa2789991905c2076abb317103b866c64e69e" - integrity sha512-uxoA5vLUfRPdjCuJ1h5LlYdmTLbYfums398v3WLkM+i/Wltl2/XyZpQWKbN++ck5L64SR/grOHqtXCUKmlZPNA== + resolved "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.10.tgz" + integrity "sha1-dLqieJmRkFwgdquzFxA7hmxk5p4= sha512-uxoA5vLUfRPdjCuJ1h5LlYdmTLbYfums398v3WLkM+i/Wltl2/XyZpQWKbN++ck5L64SR/grOHqtXCUKmlZPNA==" dependencies: available-typed-arrays "^1.0.5" call-bind "^1.0.2" @@ -5130,8 +5214,8 @@ which-typed-array@^1.1.10, which-typed-array@^1.1.9: which-typed-array@^1.1.11, which-typed-array@^1.1.2: version "1.1.13" - resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.13.tgz#870cd5be06ddb616f504e7b039c4c24898184d36" - integrity sha512-P5Nra0qjSncduVPEAr7xhoF5guty49ArDTwzJ/yNuPIbZppyRxFQsRCWrocxIY+CnMVG+qfbU2FmDKyvSGClow== + resolved "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.13.tgz" + integrity "sha1-hwzVvgbdthb1BOewOcTCSJgYTTY= sha512-P5Nra0qjSncduVPEAr7xhoF5guty49ArDTwzJ/yNuPIbZppyRxFQsRCWrocxIY+CnMVG+qfbU2FmDKyvSGClow==" dependencies: available-typed-arrays "^1.0.5" call-bind "^1.0.4" @@ -5141,43 +5225,43 @@ which-typed-array@^1.1.11, which-typed-array@^1.1.2: which@2.0.2, which@^2.0.1, which@^2.0.2: version "2.0.2" - resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + resolved "https://registry.npmjs.org/which/-/which-2.0.2.tgz" integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== dependencies: isexe "^2.0.0" wide-align@1.1.3: version "1.1.3" - resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457" + resolved "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz" integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA== dependencies: string-width "^1.0.2 || 2" widest-line@^3.1.0: version "3.1.0" - resolved "https://registry.yarnpkg.com/widest-line/-/widest-line-3.1.0.tgz#8292333bbf66cb45ff0de1603b136b7ae1496eca" + resolved "https://registry.npmjs.org/widest-line/-/widest-line-3.1.0.tgz" integrity 
sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg== dependencies: string-width "^4.0.0" word-wrap@^1.2.3: - version "1.2.3" - resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" - integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== + version "1.2.5" + resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.5.tgz#d2c45c6dd4fbce621a66f136cbe328afd0410b34" + integrity sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA== wordwrap@~0.0.2: version "0.0.3" - resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-0.0.3.tgz#a3d5da6cd5c0bc0008d37234bbaf1bed63059107" + resolved "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz" integrity sha512-1tMA907+V4QmxV7dbRvb4/8MaRALK6q9Abid3ndMYnbyo8piisCmeONVqVSXqQA3KaP4SLt5b7ud6E2sqP8TFw== workerpool@6.1.0: version "6.1.0" - resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-6.1.0.tgz#a8e038b4c94569596852de7a8ea4228eefdeb37b" + resolved "https://registry.npmjs.org/workerpool/-/workerpool-6.1.0.tgz" integrity sha512-toV7q9rWNYha963Pl/qyeZ6wG+3nnsyvolaNUS8+R5Wtw6qJPTxIlOP1ZSvcGhEJw+l3HMMmtiNo9Gl61G4GVg== wrap-ansi@^6.2.0: version "6.2.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53" + resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz" integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA== dependencies: ansi-styles "^4.0.0" @@ -5186,7 +5270,7 @@ wrap-ansi@^6.2.0: wrap-ansi@^7.0.0: version "7.0.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz" integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== dependencies: ansi-styles "^4.0.0" @@ -5195,12 +5279,12 @@ wrap-ansi@^7.0.0: wrappy@1: version "1.0.2" - resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" + resolved "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== write-file-atomic@^3.0.0: version "3.0.3" - resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8" + resolved "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz" integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q== dependencies: imurmurhash "^0.1.4" @@ -5210,60 +5294,60 @@ write-file-atomic@^3.0.0: ws@^7, ws@^7.5.5: version "7.5.9" - resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591" - integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q== + resolved "https://registry.npmjs.org/ws/-/ws-7.5.9.tgz" + integrity "sha1-VPp9sp9MfOxosd3TqJ3gmZQrtZE= sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==" xml2js@0.5.0: version "0.5.0" - resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.5.0.tgz#d9440631fbb2ed800203fad106f2724f62c493b7" - integrity sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA== + resolved 
"https://registry.npmjs.org/xml2js/-/xml2js-0.5.0.tgz" + integrity "sha1-2UQGMfuy7YACA/rRBvJyT2LEk7c= sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==" dependencies: sax ">=0.6.0" xmlbuilder "~11.0.0" xmlbuilder@~11.0.0: version "11.0.1" - resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-11.0.1.tgz#be9bae1c8a046e76b31127726347d0ad7002beb3" - integrity sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA== + resolved "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz" + integrity "sha1-vpuuHIoEbnazESdyY0fQrXACvrM= sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==" xtend@^4.0.0: version "4.0.2" - resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" + resolved "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz" integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== y18n@^4.0.0: version "4.0.3" - resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.3.tgz#b5f259c82cd6e336921efd7bfd8bf560de9eeedf" + resolved "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz" integrity sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ== y18n@^5.0.5: version "5.0.8" - resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.8.tgz#7f4934d0f7ca8c56f95314939ddcd2dd91ce1d55" + resolved "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz" integrity sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA== yallist@^3.0.2: version "3.1.1" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" + resolved "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz" integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== yallist@^4.0.0: version "4.0.0" - resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + resolved "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz" integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== yaml@^1.10.2: version "1.10.2" - resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" + resolved "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz" integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== yargs-parser@20.2.4: version "20.2.4" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.4.tgz#b42890f14566796f85ae8e3a25290d205f154a54" + resolved "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz" integrity sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA== yargs-parser@^18.1.2: version "18.1.3" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0" + resolved "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz" integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ== dependencies: camelcase "^5.0.0" @@ -5271,12 +5355,12 @@ yargs-parser@^18.1.2: yargs-parser@^20.2.2: version "20.2.9" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee" + resolved 
"https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz" integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w== yargs-unparser@2.0.0: version "2.0.0" - resolved "https://registry.yarnpkg.com/yargs-unparser/-/yargs-unparser-2.0.0.tgz#f131f9226911ae5d9ad38c432fe809366c2325eb" + resolved "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz" integrity sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA== dependencies: camelcase "^6.0.0" @@ -5286,7 +5370,7 @@ yargs-unparser@2.0.0: yargs@16.2.0: version "16.2.0" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" + resolved "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz" integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== dependencies: cliui "^7.0.2" @@ -5299,7 +5383,7 @@ yargs@16.2.0: yargs@^15.0.2: version "15.4.1" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.4.1.tgz#0d87a16de01aee9d8bec2bfbf74f67851730f4f8" + resolved "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz" integrity sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A== dependencies: cliui "^6.0.0" @@ -5316,12 +5400,12 @@ yargs@^15.0.2: yocto-queue@^0.1.0: version "0.1.0" - resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b" + resolved "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz" integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== yoga-layout-prebuilt@^1.9.6: version "1.10.0" - resolved "https://registry.yarnpkg.com/yoga-layout-prebuilt/-/yoga-layout-prebuilt-1.10.0.tgz#2936fbaf4b3628ee0b3e3b1df44936d6c146faa6" + resolved "https://registry.npmjs.org/yoga-layout-prebuilt/-/yoga-layout-prebuilt-1.10.0.tgz" integrity sha512-YnOmtSbv4MTf7RGJMK0FvZ+KD8OEe/J5BNnR0GHhD8J/XcG/Qvxgszm0Un6FTHWW4uHlTgP0IztiXQnGyIR45g== dependencies: "@types/yoga-layout" "1.9.2" From a719f069761517d4b236592db206fd151b4aca96 Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Mon, 11 Dec 2023 16:35:41 +0100 Subject: [PATCH 099/147] PROF-8523: Add Net events to timeline (#3832) --- .../src/profiling/profilers/events.js | 81 +++++++++++++------ 1 file changed, 57 insertions(+), 24 deletions(-) diff --git a/packages/dd-trace/src/profiling/profilers/events.js b/packages/dd-trace/src/profiling/profilers/events.js index 4f51e5efdfb..417a91d80d4 100644 --- a/packages/dd-trace/src/profiling/profilers/events.js +++ b/packages/dd-trace/src/profiling/profilers/events.js @@ -74,13 +74,46 @@ class GCDecorator { } } +// Maintains "lanes" (or virtual threads) to avoid overlaps in events. The +// decorator starts out with no lanes, and dynamically adds them as needed. +// Every event is put in the first lane where it doesn't overlap with the last +// event in that lane. If there's no lane without overlaps, a new lane is +// created. 
+class Lanes { + constructor (stringTable, name) { + this.stringTable = stringTable + this.name = name + this.lanes = [] + } + + getLabelFor (item) { + const startTime = item.startTime + const endTime = startTime + item.duration + + // Biases towards populating earlier lanes, but at least it's simple + for (const lane of this.lanes) { + if (lane.endTime <= startTime) { + lane.endTime = endTime + return lane.label + } + } + const label = labelFromStrStr( + this.stringTable, + THREAD_NAME, + `${this.name}-${this.lanes.length}` + ) + this.lanes.push({ endTime, label }) + return label + } +} + class DNSDecorator { constructor (stringTable) { this.stringTable = stringTable this.operationNameLabelKey = stringTable.dedup('operation') this.hostLabelKey = stringTable.dedup('host') this.addressLabelKey = stringTable.dedup('address') - this.lanes = [] + this.lanes = new Lanes(stringTable, `${threadNamePrefix} DNS`) } decorateSample (sampleInput, item) { @@ -107,32 +140,31 @@ class DNSDecorator { addLabel(this.hostLabelKey, detail.host) } } - labels.push(this.getLaneLabelFor(item)) + labels.push(this.lanes.getLabelFor(item)) } +} - // Maintains "lanes" (or virtual threads) to avoid overlaps in events. The - // decorator starts out with no lanes, and dynamically adds them as needed. - // Every event is put in the first lane where it doesn't overlap with the last - // event in that lane. If there's no lane without overlaps, a new lane is - // created. - getLaneLabelFor (item) { - const startTime = item.startTime - const endTime = startTime + item.duration +class NetDecorator { + constructor (stringTable) { + this.stringTable = stringTable + this.operationNameLabelKey = stringTable.dedup('operation') + this.addressLabelKey = stringTable.dedup('address') + this.lanes = new Lanes(stringTable, `${threadNamePrefix} Net`) + } - // Biases towards populating earlier lanes, but at least it's simple - for (const lane of this.lanes) { - if (lane.endTime <= startTime) { - lane.endTime = endTime - return lane.label - } + decorateSample (sampleInput, item) { + const labels = sampleInput.label + const stringTable = this.stringTable + function addLabel (labelNameKey, labelValue) { + labels.push(labelFromStr(stringTable, labelNameKey, labelValue)) } - const label = labelFromStrStr( - this.stringTable, - THREAD_NAME, - `${threadNamePrefix} DNS-${this.lanes.length}` - ) - this.lanes.push({ endTime, label }) - return label + const op = item.name + addLabel(this.operationNameLabelKey, op) + if (op === 'connect') { + const detail = item.detail + addLabel(this.addressLabelKey, `${detail.host}:${detail.port}`) + } + labels.push(this.lanes.getLabelFor(item)) } } @@ -141,9 +173,10 @@ class DNSDecorator { const decoratorTypes = { gc: GCDecorator } -// Needs at least node 16 for DNS +// Needs at least node 16 for DNS and Net if (node16) { decoratorTypes.dns = DNSDecorator + decoratorTypes.net = NetDecorator } /** From d4ee69c81ad128d2381cd6ab889458fb9040b62f Mon Sep 17 00:00:00 2001 From: Ida Liu <119438987+ida613@users.noreply.github.com> Date: Mon, 11 Dec 2023 15:15:28 -0500 Subject: [PATCH 100/147] Partially upgrade instrumentation telemetry from v1 to v2 (#3827) * adding required headers for in tel * adding required headers for in tel * updating version in unit tests * updating version in unit tests * app_started- extended_heartbeat- header- * updating heartbeat and adding extendedheartbeat * updating telemetry to v2 * worked on addressing telemetry uniform heartbeat * added some tests for instel v2 * cleaned up instel tests * 
adding dependencies variable check & tests * change to capture initial load for dependencies * Initial retry logic for telemetry * fixing heartbeat tests * adding in extended heartbeat * Cleaned up Index and send data * fixed syntax on telemetry tests * Added extended heartbeat logic * updated app-heartbeat test * modified config file for app-started * updating extended heartbeat to setInterval * adding tests for extendedheartbeat * updating extended heartbeat to setInterval * adding tests for extendedheartbeat * fix app-dependencies-loaded * modified config file to support telemetry app-started * modified config file for telemetry * modified config file to report tracer config values to telemetry * finished updating telemetry event app-started from v1 to v2 * fixed config file * fixed lint errors * attempt to remove yarn.lock changes * fixed telemetry settimeout bug * removed config file changes for now * reverted changes in config.spec.js * fixed bugs * disabled telemetry app-heartbeat test for now * added in origin field for telemetry app-started config * only send integrations through telemetry when there are integrations to be sent * only send integrations through telemetry when there are integrations to be sent * corrected telemetry event name * added back missing telemetry config value * added commit to skip telemetry heartbeat test for now * fixed telemetry send-data * Update packages/dd-trace/src/telemetry/dependencies.js Co-authored-by: Thomas Hunter II * Update packages/dd-trace/src/telemetry/dependencies.js Co-authored-by: Thomas Hunter II * Update packages/dd-trace/src/telemetry/index.js Co-authored-by: Thomas Hunter II * Update packages/dd-trace/src/telemetry/index.js Co-authored-by: Thomas Hunter II * Update packages/dd-trace/src/telemetry/index.js Co-authored-by: Thomas Hunter II * Update packages/dd-trace/src/telemetry/send-data.js Co-authored-by: Thomas Hunter II * Update packages/dd-trace/src/telemetry/index.js Co-authored-by: Thomas Hunter II * added agentless telemetry data center routing * modified agentless telemetry routing * updated telemetry logs schema * polished instrumentation telemetry PR * removed app-started error field for now --------- Co-authored-by: Crystal Magloire Co-authored-by: Sam Brenner Co-authored-by: Sam Brenner <106700075+sabrenner@users.noreply.github.com> Co-authored-by: bojbrook Co-authored-by: Crystal Magloire Co-authored-by: Thomas Hunter II --- packages/dd-trace/src/config.js | 8 +- packages/dd-trace/src/startup-log.js | 8 +- .../dd-trace/src/telemetry/dependencies.js | 64 +- packages/dd-trace/src/telemetry/index.js | 178 ++++-- packages/dd-trace/src/telemetry/logs/index.js | 2 +- packages/dd-trace/src/telemetry/send-data.js | 52 +- .../test/telemetry/dependencies.spec.js | 160 ++++- .../dd-trace/test/telemetry/index.spec.js | 582 +++++++++++++++--- .../test/telemetry/logs/index.spec.js | 2 +- .../dd-trace/test/telemetry/send-data.spec.js | 42 +- 10 files changed, 938 insertions(+), 160 deletions(-) diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index 3cc35ecb6aa..5729e4761de 100644 --- a/packages/dd-trace/src/config.js +++ b/packages/dd-trace/src/config.js @@ -474,6 +474,11 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) 
DD_IAST_ENABLED ) + const DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED = coalesce( + process.env.DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED, + true + ) + const defaultIastRequestSampling = 30 const iastRequestSampling = coalesce( parseInt(iastOptions?.requestSampling), @@ -618,7 +623,8 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) heartbeatInterval: DD_TELEMETRY_HEARTBEAT_INTERVAL, debug: isTrue(DD_TELEMETRY_DEBUG), logCollection: isTrue(DD_TELEMETRY_LOG_COLLECTION_ENABLED), - metrics: isTrue(DD_TELEMETRY_METRICS_ENABLED) + metrics: isTrue(DD_TELEMETRY_METRICS_ENABLED), + dependencyCollection: DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED } this.protocolVersion = DD_TRACE_AGENT_PROTOCOL_VERSION this.tagsHeaderMaxLength = parseInt(DD_TRACE_X_DATADOG_TAGS_MAX_LENGTH) diff --git a/packages/dd-trace/src/startup-log.js b/packages/dd-trace/src/startup-log.js index 2cce76ed848..af3aa858476 100644 --- a/packages/dd-trace/src/startup-log.js +++ b/packages/dd-trace/src/startup-log.js @@ -6,6 +6,7 @@ const os = require('os') const { inspect } = require('util') const tracerVersion = require('../../../package.json').version +const errors = {} let config let pluginManager let samplingRules = [] @@ -89,6 +90,10 @@ function startupLog ({ agentError } = {}) { info('DATADOG TRACER CONFIGURATION - ' + out) if (agentError) { warn('DATADOG TRACER DIAGNOSTIC - Agent Error: ' + agentError.message) + errors.agentError = { + code: agentError.code ? agentError.code : '', + message: `Agent Error:${agentError.message}` + } } config = undefined @@ -112,5 +117,6 @@ module.exports = { startupLog, setStartupLogConfig, setStartupLogPluginManager, - setSamplingRules + setSamplingRules, + errors } diff --git a/packages/dd-trace/src/telemetry/dependencies.js b/packages/dd-trace/src/telemetry/dependencies.js index 6d502a748f3..5ec0a435b71 100644 --- a/packages/dd-trace/src/telemetry/dependencies.js +++ b/packages/dd-trace/src/telemetry/dependencies.js @@ -6,6 +6,7 @@ const requirePackageJson = require('../require-package-json') const { sendData } = require('./send-data') const dc = require('dc-polyfill') const { fileURLToPath } = require('url') +const { isTrue } = require('../../src/util') const savedDependenciesToSend = new Set() const detectedDependencyKeys = new Set() @@ -14,20 +15,57 @@ const detectedDependencyVersions = new Set() const FILE_URI_START = `file://` const moduleLoadStartChannel = dc.channel('dd-trace:moduleLoadStart') -let immediate, config, application, host +let immediate, config, application, host, initialLoad let isFirstModule = true +let getRetryData +let updateRetryData +function createBatchPayload (payload) { + const batchPayload = [] + payload.map(item => { + batchPayload.push({ + request_type: item.reqType, + payload: item.payload + }) + }) + + return batchPayload +} function waitAndSend (config, application, host) { if (!immediate) { immediate = setImmediate(() => { immediate = null if (savedDependenciesToSend.size > 0) { - const dependencies = Array.from(savedDependenciesToSend.values()).splice(0, 1000).map(pair => { - savedDependenciesToSend.delete(pair) - const [name, version] = pair.split(' ') - return { name, version } - }) - sendData(config, application, host, 'app-dependencies-loaded', { dependencies }) + const dependencies = Array.from(savedDependenciesToSend.values()) + // if a depencdency is from the initial load, *always* send the event + // Otherwise, only send if dependencyCollection is enabled + .filter(dep => { + const initialLoadModule = 
isTrue(dep.split(' ')[2]) + const sendModule = initialLoadModule || (config.telemetry?.dependencyCollection) + + if (!sendModule) savedDependenciesToSend.delete(dep) // we'll never send it + return sendModule + }) + .splice(0, 2000) // v2 documentation specifies up to 2000 dependencies can be sent at once + .map(pair => { + savedDependenciesToSend.delete(pair) + const [name, version] = pair.split(' ') + return { name, version } + }) + let currPayload + const retryData = getRetryData() + if (retryData) { + currPayload = { reqType: 'app-dependencies-loaded', payload: { dependencies } } + } else { + if (!dependencies.length) return // no retry data and no dependencies, nothing to send + currPayload = { dependencies } + } + + const payload = retryData ? createBatchPayload([currPayload, retryData]) : currPayload + const reqType = retryData ? 'message-batch' : 'app-dependencies-loaded' + + sendData(config, application, host, reqType, payload, updateRetryData) + if (savedDependenciesToSend.size > 0) { waitAndSend(config, application, host) } @@ -76,7 +114,7 @@ function onModuleLoad (data) { const dependencyAndVersion = `${name} ${version}` if (!detectedDependencyVersions.has(dependencyAndVersion)) { - savedDependenciesToSend.add(dependencyAndVersion) + savedDependenciesToSend.add(`${dependencyAndVersion} ${initialLoad}`) detectedDependencyVersions.add(dependencyAndVersion) waitAndSend(config, application, host) @@ -89,11 +127,19 @@ function onModuleLoad (data) { } } } -function start (_config, _application, _host) { +function start (_config = {}, _application, _host, getRetryDataFunction, updateRetryDatafunction) { config = _config application = _application host = _host + initialLoad = true + getRetryData = getRetryDataFunction + updateRetryData = updateRetryDatafunction moduleLoadStartChannel.subscribe(onModuleLoad) + + // try and capture intially loaded modules in the first tick + // since, ideally, the tracer (and this module) should be loaded first, + // this should capture any first-tick dependencies + queueMicrotask(() => { initialLoad = false }) } function isDependency (filename, request) { diff --git a/packages/dd-trace/src/telemetry/index.js b/packages/dd-trace/src/telemetry/index.js index 7b3ee094787..a99aba775eb 100644 --- a/packages/dd-trace/src/telemetry/index.js +++ b/packages/dd-trace/src/telemetry/index.js @@ -1,13 +1,11 @@ 'use strict' - const tracerVersion = require('../../../../package.json').version const dc = require('dc-polyfill') const os = require('os') const dependencies = require('./dependencies') const { sendData } = require('./send-data') - +const { errors } = require('../startup-log') const { manager: metricsManager } = require('./metrics') -const logs = require('./logs') const telemetryStartChannel = dc.channel('datadog:telemetry:start') const telemetryStopChannel = dc.channel('datadog:telemetry:stop') @@ -17,11 +15,53 @@ let pluginManager let application let host -let interval let heartbeatTimeout let heartbeatInterval +let extendedInterval +let integrations +let retryData = null +const extendedHeartbeatPayload = {} + const sentIntegrations = new Set() +function getRetryData () { + return retryData +} + +function updateRetryData (error, retryObj) { + if (error) { + if (retryObj.reqType === 'message-batch') { + const payload = retryObj.payload[0].payload + const reqType = retryObj.payload[0].request_type + retryData = { payload: payload, reqType: reqType } + + // Since this payload failed twice it now gets save in to the extended heartbeat + const failedPayload = 
retryObj.payload[1].payload + const failedReqType = retryObj.payload[1].request_type + + // save away the dependencies and integration request for extended heartbeat. + if (failedReqType === 'app-integrations-change') { + if (extendedHeartbeatPayload['integrations']) { + extendedHeartbeatPayload['integrations'].push(failedPayload) + } else { + extendedHeartbeatPayload['integrations'] = [failedPayload] + } + } + if (failedReqType === 'app-dependencies-loaded') { + if (extendedHeartbeatPayload['dependencies']) { + extendedHeartbeatPayload['dependencies'].push(failedPayload) + } else { + extendedHeartbeatPayload['dependencies'] = [failedPayload] + } + } + } else { + retryData = retryObj + } + } else { + retryData = null + } +} + function getIntegrations () { const newIntegrations = [] for (const pluginName in pluginManager._pluginsByName) { @@ -38,6 +78,23 @@ function getIntegrations () { return newIntegrations } +function getProducts (config) { + const products = { + appsec: { + enabled: config.appsec.enabled + }, + profiler: { + version: tracerVersion, + enabled: config.profiling.enabled + } + } + if (errors.profilingError) { + products.profiler.error = errors.profilingError + errors.profilingError = {} + } + return products +} + function flatten (input, result = [], prefix = [], traversedObjects = null) { traversedObjects = traversedObjects || new WeakSet() if (traversedObjects.has(input)) { @@ -48,33 +105,30 @@ function flatten (input, result = [], prefix = [], traversedObjects = null) { if (typeof value === 'object' && value !== null) { flatten(value, result, [...prefix, key], traversedObjects) } else { - result.push({ name: [...prefix, key].join('.'), value }) + // TODO: add correct origin value + result.push({ name: [...prefix, key].join('.'), value, origin: 'unknown' }) } } return result } -function appStarted () { - return { - integrations: getIntegrations(), - dependencies: [], - configuration: flatten(formatConfig(config)), - additional_payload: [] +function appStarted (config) { + const app = { + products: getProducts(config), + configuration: flatten(config) } -} - -function formatConfig (config) { - // format peerServiceMapping from an object to a string map in order for - // telemetry intake to accept the configuration - config.peerServiceMapping = config.peerServiceMapping - ? 
Object.entries(config.peerServiceMapping).map(([key, value]) => `${key}:${value}`).join(',') - : '' - return config + // TODO: add app.error with correct error codes + // if (errors.agentError) { + // app.error = errors.agentError + // errors.agentError = {} + // } + return app } function onBeforeExit () { process.removeListener('beforeExit', onBeforeExit) - sendData(config, application, host, 'app-closing') + const { reqType, payload } = createPayload('app-closing') + sendData(config, application, host, reqType, payload) } function createAppObject (config) { @@ -121,14 +175,52 @@ function getTelemetryData () { return { config, application, host, heartbeatInterval } } +function createBatchPayload (payload) { + const batchPayload = [] + payload.map(item => { + batchPayload.push({ + request_type: item.reqType, + payload: item.payload + }) + }) + + return batchPayload +} + +function createPayload (currReqType, currPayload = {}) { + if (getRetryData()) { + const payload = { reqType: currReqType, payload: currPayload } + const batchPayload = createBatchPayload([payload, retryData]) + return { 'reqType': 'message-batch', 'payload': batchPayload } + } + + return { 'reqType': currReqType, 'payload': currPayload } +} + function heartbeat (config, application, host) { heartbeatTimeout = setTimeout(() => { - sendData(config, application, host, 'app-heartbeat') + metricsManager.send(config, application, host) + + const { reqType, payload } = createPayload('app-heartbeat') + sendData(config, application, host, reqType, payload, updateRetryData) heartbeat(config, application, host) }, heartbeatInterval).unref() return heartbeatTimeout } +function extendedHeartbeat (config) { + extendedInterval = setInterval(() => { + const appPayload = appStarted(config) + const payload = { + ...appPayload, + ...extendedHeartbeatPayload + } + sendData(config, application, host, 'app-extended-heartbeat', payload) + Object.keys(extendedHeartbeatPayload).forEach(key => delete extendedHeartbeatPayload[key]) + }, 1000 * 60 * 60 * 24).unref() + return extendedInterval +} + function start (aConfig, thePluginManager) { if (!aConfig.telemetry.enabled) { return @@ -138,19 +230,22 @@ function start (aConfig, thePluginManager) { application = createAppObject(config) host = createHostObject() heartbeatInterval = config.telemetry.heartbeatInterval + integrations = getIntegrations() + + dependencies.start(config, application, host, getRetryData, updateRetryData) + + sendData(config, application, host, 'app-started', appStarted(config)) - dependencies.start(config, application, host) - logs.start(config) + if (integrations.length > 0) { + sendData(config, application, host, 'app-integrations-change', + { integrations }, updateRetryData) + } - sendData(config, application, host, 'app-started', appStarted()) heartbeat(config, application, host) - interval = setInterval(() => { - metricsManager.send(config, application, host) - logs.send(config, application, host) - }, heartbeatInterval) - interval.unref() - process.on('beforeExit', onBeforeExit) + extendedHeartbeat(config) + + process.on('beforeExit', onBeforeExit) telemetryStartChannel.publish(getTelemetryData()) } @@ -158,7 +253,7 @@ function stop () { if (!config) { return } - clearInterval(interval) + clearInterval(extendedInterval) clearTimeout(heartbeatTimeout) process.removeListener('beforeExit', onBeforeExit) @@ -175,7 +270,10 @@ function updateIntegrations () { if (integrations.length === 0) { return } - sendData(config, application, host, 'app-integrations-change', { 
integrations }) + + const { reqType, payload } = createPayload('app-integrations-change', { integrations }) + + sendData(config, application, host, reqType, payload, updateRetryData) } function updateConfig (changes, config) { @@ -188,21 +286,15 @@ function updateConfig (changes, config) { const application = createAppObject(config) const host = createHostObject() - const names = { - sampleRate: 'DD_TRACE_SAMPLE_RATE', - logInjection: 'DD_LOG_INJECTION', - headerTags: 'DD_TRACE_HEADER_TAGS' - } - const configuration = changes.map(change => ({ - name: names[change.name], + name: change.name, value: Array.isArray(change.value) ? change.value.join(',') : change.value, origin: change.origin })) - sendData(config, application, host, 'app-client-configuration-change', { - configuration - }) + const { reqType, payload } = createPayload('app-client-configuration-change', { configuration }) + + sendData(config, application, host, reqType, payload, updateRetryData) } module.exports = { diff --git a/packages/dd-trace/src/telemetry/logs/index.js b/packages/dd-trace/src/telemetry/logs/index.js index 4584061613e..44a6a2fa4c5 100644 --- a/packages/dd-trace/src/telemetry/logs/index.js +++ b/packages/dd-trace/src/telemetry/logs/index.js @@ -52,7 +52,7 @@ function stop () { function send (config, application, host) { if (!enabled) return - const logs = logCollector.drain() + const logs = { 'logs': logCollector.drain() } if (logs) { sendData(config, application, host, 'logs', logs) } diff --git a/packages/dd-trace/src/telemetry/send-data.js b/packages/dd-trace/src/telemetry/send-data.js index f460dfdac3a..ae1bc93a454 100644 --- a/packages/dd-trace/src/telemetry/send-data.js +++ b/packages/dd-trace/src/telemetry/send-data.js @@ -1,9 +1,12 @@ + const request = require('../exporters/common/request') +const log = require('../log') +let agentTelemetry = true function getHeaders (config, application, reqType) { const headers = { 'content-type': 'application/json', - 'dd-telemetry-api-version': 'v1', + 'dd-telemetry-api-version': 'v2', 'dd-telemetry-request-type': reqType, 'dd-client-library-language': application.language_name, 'dd-client-library-version': application.tracer_version @@ -28,7 +31,7 @@ function getPayload (payload) { } } -function sendData (config, application, host, reqType, payload = {}) { +function sendData (config, application, host, reqType, payload = {}, cb = () => {}) { const { hostname, port, @@ -44,7 +47,8 @@ function sendData (config, application, host, reqType, payload = {}) { headers: getHeaders(config, application, reqType) } const data = JSON.stringify({ - api_version: 'v1', + api_version: 'v2', + naming_schema_version: config.spanAttributeSchema ? 
config.spanAttributeSchema : '', request_type: reqType, tracer_time: Math.floor(Date.now() / 1000), runtime_id: config.tags['runtime-id'], @@ -54,8 +58,46 @@ function sendData (config, application, host, reqType, payload = {}) { host }) - request(data, options, () => { - // ignore errors + request(data, options, (error) => { + if (error && process.env.DD_API_KEY && config.site) { + if (agentTelemetry) { + log.warn('Agent telemetry failed, started agentless telemetry') + agentTelemetry = false + } + // figure out which data center to send to + let backendUrl + const dataCenters = [ + 'datadoghq.com', + 'us3.datadoghq.com', + 'us5.datadoghq.com', + 'ap1.datadoghq.com', + 'eu1.datadoghq.com' + ] + if (config.site === 'datad0g.com') { // staging + backendUrl = 'https://all-http-intake.logs.datad0g.com/api/v2/apmtelemetry' + } else if (dataCenters.includes(config.site)) { + backendUrl = 'https://instrumentation-telemetry-intake.' + config.site + '/api/v2/apmtelemetry' + } + const backendHeader = { ...options.headers, 'DD-API-KEY': process.env.DD_API_KEY } + const backendOptions = { + ...options, + url: backendUrl, + headers: backendHeader + } + if (backendUrl) { + request(data, backendOptions, (error) => { log.error(error) }) + } else { + log.error('Invalid Telemetry URL') + } + } + + if (!error && !agentTelemetry) { + agentTelemetry = true + log.info('Started agent telemetry') + } + + // call the callback function so that we can track the error and payload + cb(error, { payload, reqType }) }) } diff --git a/packages/dd-trace/test/telemetry/dependencies.spec.js b/packages/dd-trace/test/telemetry/dependencies.spec.js index 971d8066503..f09f1be2774 100644 --- a/packages/dd-trace/test/telemetry/dependencies.spec.js +++ b/packages/dd-trace/test/telemetry/dependencies.spec.js @@ -15,6 +15,7 @@ describe('dependencies', () => { const dependencies = proxyquire('../../src/telemetry/dependencies', { 'dc-polyfill': dc }) + dependencies.start() expect(subscribe).to.have.been.calledOnce }) @@ -29,17 +30,22 @@ describe('dependencies', () => { let dependencies let sendData let requirePackageJson + let getRetryData + let updateRetryData beforeEach(() => { requirePackageJson = sinon.stub() sendData = sinon.stub() + getRetryData = sinon.stub() + updateRetryData = sinon.stub() dependencies = proxyquire('../../src/telemetry/dependencies', { + './index': { getRetryData, updateRetryData }, './send-data': { sendData }, '../require-package-json': requirePackageJson }) global.setImmediate = function (callback) { callback() } - dependencies.start(config, application, host) + dependencies.start(config, application, host, getRetryData, updateRetryData) // force first publish to load cached requires moduleLoadStartChannel.publish({}) @@ -48,6 +54,8 @@ describe('dependencies', () => { afterEach(() => { dependencies.stop() sendData.reset() + getRetryData.reset() + updateRetryData.reset() global.setImmediate = originalSetImmediate }) @@ -265,7 +273,7 @@ describe('dependencies', () => { expect(sendData).to.have.been.calledOnce }) - it('should call sendData twice with more than 1000 dependencies', (done) => { + it('should call sendData twice with more than 2000 dependencies', (done) => { const requestPrefix = 'custom-module' requirePackageJson.returns({ version: '1.0.0' }) const timeouts = [] @@ -280,7 +288,7 @@ describe('dependencies', () => { timeouts.push(timeout) return timeout } - for (let i = 0; i < 1200; i++) { + for (let i = 0; i < 2200; i++) { const request = requestPrefix + i const filename = 
path.join(basepathWithoutNodeModules, 'node_modules', request, 'index.js') moduleLoadStartChannel.publish({ request, filename }) @@ -294,4 +302,150 @@ describe('dependencies', () => { }) }) }) + + describe('with configuration', () => { + const config = { + telemetry: { + dependencyCollection: false + } + } + const application = 'test' + const host = 'host' + const basepathWithoutNodeModules = process.cwd().replace(/node_modules/g, 'nop') + + let dependencies + let sendData + let requirePackageJson + let getRetryData + let updateRetryData + + beforeEach(() => { + requirePackageJson = sinon.stub() + sendData = sinon.stub() + getRetryData = sinon.stub() + updateRetryData = sinon.stub() + dependencies = proxyquire('../../src/telemetry/dependencies', { + './index': { getRetryData, updateRetryData }, + './send-data': { sendData }, + '../require-package-json': requirePackageJson + }) + global.setImmediate = function (callback) { callback() } + + dependencies.start(config, application, host, getRetryData, updateRetryData) + + // force first publish to load cached requires + moduleLoadStartChannel.publish({}) // called once here + const request = 'custom-module' + requirePackageJson.returns({ version: '1.0.0' }) + const filename = path.join(basepathWithoutNodeModules, 'node_modules', request, 'index.js') + moduleLoadStartChannel.publish({ request, filename }) // called again here + }) + + afterEach(() => { + dependencies.stop() + sendData.reset() + getRetryData.reset() + updateRetryData.reset() + global.setImmediate = originalSetImmediate + }) + + it('should not call sendData for modules not captured in the initial load', done => { + setTimeout(() => { + // using sendData.callCount wasn't working properly + const timesCalledBeforeLazyLoad = sendData.getCalls().length + + const request = 'custom-module2' + const filename = path.join(basepathWithoutNodeModules, 'node_modules', request, 'index.js') + moduleLoadStartChannel.publish({ request, filename }) // should not be called here + + expect(sendData.getCalls().length).to.equal(timesCalledBeforeLazyLoad) + done() + }, 5) // simulate lazy-loaded dependency, small ms delay to be safe + }) + }) + + describe('on failed request', () => { + const config = {} + const application = 'test' + const host = 'host' + const basepathWithoutNodeModules = process.cwd().replace(/node_modules/g, 'nop') + let dependencies + let sendData + let requirePackageJson + let capturedRequestType + let getRetryData + let updateRetryData + + beforeEach(() => { + requirePackageJson = sinon.stub() + sendData = (config, application, host, reqType, payload, cb = () => {}) => { + capturedRequestType = reqType + // Simulate an HTTP error by calling the callback with an error + cb(new Error('HTTP request error'), { + payload: payload, + reqType: 'app-integrations-change' + }) + } + getRetryData = sinon.stub() + updateRetryData = sinon.stub() + dependencies = proxyquire('../../src/telemetry/dependencies', { + './send-data': { sendData }, + '../require-package-json': requirePackageJson + }) + global.setImmediate = function (callback) { callback() } + + dependencies.start(config, application, host, getRetryData, updateRetryData) + + // force first publish to load cached requires + moduleLoadStartChannel.publish({}) + }) + + afterEach(() => { + dependencies.stop() + getRetryData.reset() + updateRetryData.reset() + global.setImmediate = originalSetImmediate + }) + + it('should update retry data', () => { + const request = 'custom-module' + requirePackageJson.returns({ version: '1.0.0' }) + 
const filename = path.join(basepathWithoutNodeModules, 'node_modules', request, 'index.js') + moduleLoadStartChannel.publish({ request, filename }) + // expect(getRetryData).to.have.been.calledOnce + expect(capturedRequestType).to.equals('app-dependencies-loaded') + // expect(sendData).to.have.been.calledOnce + // expect(updateRetryData).to.have.been.calledOnce + }) + + it('should create batch request', () => { + let request = 'custom-module' + requirePackageJson.returns({ version: '1.0.0' }) + let filename = path.join(basepathWithoutNodeModules, 'node_modules', request, 'index.js') + moduleLoadStartChannel.publish({ request, filename }) + expect(getRetryData).to.have.been.calledOnce + expect(capturedRequestType).to.equals('app-dependencies-loaded') + expect(updateRetryData).to.have.been.calledOnce + + getRetryData.returns({ + request_type: 'app-integrations-change', + payload: { + 'integrations': [{ + name: 'zoo1', + enabled: true, + auto_enabled: true + }] + } + + }) + + request = 'even-more-custom-module' + requirePackageJson.returns({ version: '1.0.0' }) + filename = path.join(basepathWithoutNodeModules, 'node_modules', request, 'index.js') + moduleLoadStartChannel.publish({ request, filename }) + expect(getRetryData).to.have.been.calledTwice + expect(capturedRequestType).to.equals('message-batch') + expect(updateRetryData).to.have.been.calledTwice + }) + }) }) diff --git a/packages/dd-trace/test/telemetry/index.spec.js b/packages/dd-trace/test/telemetry/index.spec.js index 0a089b0c642..51895d33e00 100644 --- a/packages/dd-trace/test/telemetry/index.spec.js +++ b/packages/dd-trace/test/telemetry/index.spec.js @@ -8,28 +8,22 @@ const http = require('http') const { once } = require('events') const { storage } = require('../../../datadog-core') const os = require('os') +const sinon = require('sinon') + +const DEFAULT_HEARTBEAT_INTERVAL = 60000 let traceAgent describe('telemetry', () => { - const HEARTBEAT_INTERVAL = 60000 - let origSetInterval let telemetry let pluginsByName before(done => { - origSetInterval = setInterval - - global.setInterval = (fn, interval) => { - expect(interval).to.equal(HEARTBEAT_INTERVAL) - // we only want one of these - return setTimeout(fn, 100) - } - // I'm not sure how, but some other test in some other file keeps context // alive after it's done, meaning this test here runs in its async context. // If we don't no-op the server inside it, it will trace it, which will // screw up this test file entirely. 
-- bengl + storage.run({ noop: true }, () => { traceAgent = http.createServer(async (req, res) => { const chunks = [] @@ -65,7 +59,7 @@ describe('telemetry', () => { circularObject.child.parent = circularObject telemetry.start({ - telemetry: { enabled: true, heartbeatInterval: HEARTBEAT_INTERVAL }, + telemetry: { enabled: true, heartbeatInterval: DEFAULT_HEARTBEAT_INTERVAL }, hostname: 'localhost', port: traceAgent.address().port, service: 'test service', @@ -75,6 +69,8 @@ describe('telemetry', () => { 'runtime-id': '1a2b3c' }, circularObject, + appsec: { enabled: true }, + profiling: { enabled: true }, peerServiceMapping: { 'service_1': 'remapped_service_1', 'service_2': 'remapped_service_2' @@ -87,29 +83,41 @@ describe('telemetry', () => { after(() => { telemetry.stop() traceAgent.close() - global.setInterval = origSetInterval }) it('should send app-started', () => { return testSeq(1, 'app-started', payload => { - expect(payload).to.deep.include({ + expect(payload).to.have.property('products').that.deep.equal({ + appsec: { enabled: true }, + profiler: { version: '5.0.0-pre', enabled: true } + }) + expect(payload).to.have.property('configuration').that.deep.equal([ + { name: 'telemetry.enabled', value: true, origin: 'unknown' }, + { name: 'telemetry.heartbeatInterval', value: DEFAULT_HEARTBEAT_INTERVAL, origin: 'unknown' }, + { name: 'hostname', value: 'localhost', origin: 'unknown' }, + { name: 'port', value: traceAgent.address().port, origin: 'unknown' }, + { name: 'service', value: 'test service', origin: 'unknown' }, + { name: 'version', value: '1.2.3-beta4', origin: 'unknown' }, + { name: 'env', value: 'preprod', origin: 'unknown' }, + { name: 'tags.runtime-id', value: '1a2b3c', origin: 'unknown' }, + { name: 'circularObject.child.field', value: 'child_value', origin: 'unknown' }, + { name: 'circularObject.field', value: 'parent_value', origin: 'unknown' }, + { name: 'appsec.enabled', value: true, origin: 'unknown' }, + { name: 'profiling.enabled', value: true, origin: 'unknown' }, + { name: 'peerServiceMapping.service_1', value: 'remapped_service_1', origin: 'unknown' }, + { name: 'peerServiceMapping.service_2', value: 'remapped_service_2', origin: 'unknown' } + ]) + }) + }) + + it('should send app-integrations', () => { + return testSeq(2, 'app-integrations-change', payload => { + expect(payload).to.deep.equal({ integrations: [ { name: 'foo2', enabled: true, auto_enabled: true }, { name: 'bar2', enabled: false, auto_enabled: true } - ], - dependencies: [] - }).and.to.have.property('configuration').that.include.members([ - { name: 'telemetry.enabled', value: true }, - { name: 'hostname', value: 'localhost' }, - { name: 'port', value: traceAgent.address().port }, - { name: 'service', value: 'test service' }, - { name: 'version', value: '1.2.3-beta4' }, - { name: 'env', value: 'preprod' }, - { name: 'tags.runtime-id', value: '1a2b3c' }, - { name: 'circularObject.field', value: 'parent_value' }, - { name: 'circularObject.child.field', value: 'child_value' }, - { name: 'peerServiceMapping', value: 'service_1:remapped_service_1,service_2:remapped_service_2' } - ]) + ] + }) }) }) @@ -117,7 +125,7 @@ describe('telemetry', () => { pluginsByName.baz2 = { _enabled: true } telemetry.updateIntegrations() - return testSeq(2, 'app-integrations-change', payload => { + return testSeq(3, 'app-integrations-change', payload => { expect(payload).to.deep.equal({ integrations: [ { name: 'baz2', enabled: true, auto_enabled: true } @@ -130,7 +138,7 @@ describe('telemetry', () => { pluginsByName.boo2 = { 
_enabled: true } telemetry.updateIntegrations() - return testSeq(3, 'app-integrations-change', payload => { + return testSeq(4, 'app-integrations-change', payload => { expect(payload).to.deep.equal({ integrations: [ { name: 'boo2', enabled: true, auto_enabled: true } @@ -140,12 +148,12 @@ describe('telemetry', () => { }) // TODO: make this work regardless of the test runner - it.skip('should send app-closing', () => { - process.emit('beforeExit') - return testSeq(5, 'app-closing', payload => { - expect(payload).to.deep.equal({}) - }) - }) + // it.skip('should send app-closing', () => { + // process.emit('beforeExit') + // return testSeq(5, 'app-closing', payload => { + // expect(payload).to.deep.equal({}) + // }) + // }) it('should do nothing when not enabled', (done) => { telemetry.stop() @@ -163,25 +171,16 @@ describe('telemetry', () => { server.close() done() }, 10) + clearTimeout() }) }) }) describe('telemetry app-heartbeat', () => { - const HEARTBEAT_INTERVAL = 60 - let origSetInterval let telemetry - let pluginsByName + const HEARTBEAT_INTERVAL = 60 before(done => { - origSetInterval = setInterval - - global.setInterval = (fn, interval) => { - expect(interval).to.equal(HEARTBEAT_INTERVAL) - // we only want one of these - return setTimeout(fn, 100) - } - storage.run({ noop: true }, () => { traceAgent = http.createServer(async (req, res) => { const chunks = [] @@ -205,17 +204,6 @@ describe('telemetry app-heartbeat', () => { } }) - pluginsByName = { - foo2: { _enabled: true }, - bar2: { _enabled: false } - } - - const circularObject = { - child: { parent: null, field: 'child_value' }, - field: 'parent_value' - } - circularObject.child.parent = circularObject - telemetry.start({ telemetry: { enabled: true, heartbeatInterval: HEARTBEAT_INTERVAL }, hostname: 'localhost', @@ -226,9 +214,10 @@ describe('telemetry app-heartbeat', () => { tags: { 'runtime-id': '1a2b3c' }, - circularObject + appsec: { enabled: false }, + profiling: { enabled: false } }, { - _pluginsByName: pluginsByName + _pluginsByName: {} }) }) @@ -236,67 +225,483 @@ describe('telemetry app-heartbeat', () => { setTimeout(() => { telemetry.stop() traceAgent.close() - global.setInterval = origSetInterval }, HEARTBEAT_INTERVAL * 3) + clearTimeout() }) - it('should send app-heartbeat at uniform intervals', () => { - // TODO: switch to clock.tick - setTimeout(() => { - const heartbeats = [] + // flaky, will need to look into this later + it.skip('should send app-heartbeat at uniform intervals', (done) => { + function getHeartbeatCount () { + let heartbeatCount = 0 const reqCount = traceAgent.reqs.length for (let i = 0; i < reqCount; i++) { const req = traceAgent.reqs[i] if (req.headers && req.headers['dd-telemetry-request-type'] === 'app-heartbeat') { - heartbeats.push(req.body.tracer_time) + heartbeatCount++ + } + } + return heartbeatCount + } + + // TODO: switch to clock.tick + // for some reason clock.tick works with the other tests but not this one + // Ida Liu spent fruitless hours to investigate ;u; + setTimeout(() => { + expect(getHeartbeatCount()).to.be.equal(0) + }, HEARTBEAT_INTERVAL * 0.75) + setTimeout(() => { + expect(getHeartbeatCount()).to.be.equal(1) + }, HEARTBEAT_INTERVAL * 1.2) + setTimeout(() => { + expect(getHeartbeatCount()).to.be.equal(1) + }, HEARTBEAT_INTERVAL * 1.9) + setTimeout(() => { + expect(getHeartbeatCount()).to.be.equal(2) + done() + }, HEARTBEAT_INTERVAL * 2.1) + }) +}) + +describe('Telemetry extended heartbeat', () => { + const HEARTBEAT_INTERVAL = 43200000 + let telemetry + let pluginsByName + 
let clock + + before(() => { + clock = sinon.useFakeTimers() + }) + + after(() => { + clock.restore() + telemetry.stop() + traceAgent.close() + }) + it('extended beat', (done) => { + let extendedHeartbeatRequest + let beats = 0 // to keep track of the amont of times extendedHeartbeat is called + const sendDataRequest = { + sendData: (config, application, host, reqType, payload, cb = () => {}) => { + if (reqType === 'app-started') { + cb() + return + } + + if (reqType === 'app-extended-heartbeat') { + beats++ + extendedHeartbeatRequest = reqType } } - expect(heartbeats.length).to.be.greaterThanOrEqual(2) - for (let k = 0; k++; k < heartbeats.length - 1) { - expect(heartbeats[k + 1] - heartbeats[k]).to.be.equal(1) + + } + telemetry = proxyquire('../../src/telemetry', { + '../exporters/common/docker': { + id () { + return 'test docker id' + } + }, + './send-data': sendDataRequest + }) + + telemetry.start({ + telemetry: { enabled: true, heartbeatInterval: HEARTBEAT_INTERVAL }, + hostname: 'localhost', + port: 0, + service: 'test service', + version: '1.2.3-beta4', + appsec: { enabled: true }, + profiling: { enabled: true }, + env: 'preprod', + tags: { + 'runtime-id': '1a2b3c' } - }, HEARTBEAT_INTERVAL * 3) + }, { + _pluginsByName: pluginsByName + }) + clock.tick(86400000) + expect(extendedHeartbeatRequest).to.equal('app-extended-heartbeat') + expect(beats).to.equal(1) + clock.tick(86400000) + expect(beats).to.equal(2) + done() }) }) -describe('telemetry with interval change', () => { - it('should set the interval correctly', (done) => { - const telemetry = proxyquire('../../src/telemetry', { +// deleted this test for now since the global interval is now used for app-extended heartbeat +// which is not supposed to be configurable +// will ask Bryan why being able to change the interval is important after he is back from parental leave +describe('Telemetry retry', () => { + let telemetry + let capturedRequestType + let capturedPayload + let count = 0 + let pluginsByName + let clock + const HEARTBEAT_INTERVAL = 60000 + + beforeEach(() => { + clock = sinon.useFakeTimers() + pluginsByName = { + foo2: { _enabled: true }, + bar2: { _enabled: false } + } + }) + afterEach(() => { + clock.restore() + }) + + it('should retry data on next app change', () => { + const sendDataError = { + sendData: (config, application, host, reqType, payload, cb = () => {}) => { + capturedRequestType = reqType + capturedPayload = payload + + if (count < 2) { + count += 1 + return + } + // Simulate an HTTP error by calling the callback with an error + cb(new Error('HTTP request error'), { + payload: payload, + reqType: 'app-integrations-change' + }) + } + + } + telemetry = proxyquire('../../src/telemetry', { + '../exporters/common/docker': { + id () { + return 'test docker id' + } + }, + './send-data': sendDataError + }) + + telemetry.start({ + telemetry: { enabled: true, heartbeatInterval: HEARTBEAT_INTERVAL }, + hostname: 'localhost', + port: 0, + service: 'test service', + version: '1.2.3-beta4', + appsec: { enabled: true }, + profiling: { enabled: true }, + env: 'preprod', + tags: { + 'runtime-id': '1a2b3c' + } + }, { + _pluginsByName: pluginsByName + }) + + pluginsByName.boo3 = { _enabled: true } + telemetry.updateIntegrations() + expect(capturedRequestType).to.equal('app-integrations-change') + expect(capturedPayload).to.deep.equal({ + 'integrations': [{ + name: 'boo3', + enabled: true, + auto_enabled: true + }] + }) + + pluginsByName.boo5 = { _enabled: true } + telemetry.updateIntegrations() + 
expect(capturedRequestType).to.equal('message-batch') + expect(capturedPayload).to.deep.equal([{ + request_type: 'app-integrations-change', + payload: { + 'integrations': [{ + name: 'boo5', + enabled: true, + auto_enabled: true + }] + } + + }, { + request_type: 'app-integrations-change', + payload: { + 'integrations': [{ + name: 'boo3', + enabled: true, + auto_enabled: true + }] + } + + }] + ) + }) + + it('should retry data on next heartbeat', () => { + const sendDataError = { + sendData: (config, application, host, reqType, payload, cb = () => {}) => { + // skipping startup command + if (reqType === 'app-started') { + cb() + return + } + // skipping startup command + if (reqType === 'message-batch') { + capturedRequestType = reqType + capturedPayload = payload + cb() + return + } + // Simulate an HTTP error by calling the callback with an error + cb(new Error('HTTP request error'), { + payload: payload, + reqType: reqType + }) + } + + } + telemetry = proxyquire('../../src/telemetry', { '../exporters/common/docker': { id () { return 'test docker id' } }, - './send-data': { - sendData: () => {} + './send-data': sendDataError + }) + + telemetry.start({ + telemetry: { enabled: true, heartbeatInterval: HEARTBEAT_INTERVAL }, + hostname: 'localhost', + port: 0, + service: 'test service', + version: '1.2.3-beta4', + appsec: { enabled: true }, + profiling: { enabled: true }, + env: 'preprod', + tags: { + 'runtime-id': '1a2b3c' } + }, { + _pluginsByName: pluginsByName }) + // jump to next heartbeat request + clock.tick(HEARTBEAT_INTERVAL) + expect(capturedRequestType).to.equal('message-batch') + expect(capturedPayload).to.deep.equal([{ + request_type: 'app-heartbeat', + payload: {} + }, { + request_type: 'app-integrations-change', + payload: { + 'integrations': [{ + name: 'foo2', + enabled: true, + auto_enabled: true + }, + { + name: 'bar2', + enabled: false, + auto_enabled: true + }] + } + + }] + ) + }) + + it('should send regular request after completed batch request ', () => { + const sendDataError = { + sendData: (config, application, host, reqType, payload, cb = () => {}) => { + capturedRequestType = reqType + capturedPayload = payload + + // skipping startup command + if (reqType === 'app-started' || reqType === 'message-batch') { + cb() + return + } + + // Simulate an HTTP error by calling the callback with an error + cb(new Error('HTTP request error'), { + payload: payload, + reqType: 'app-integrations-change' + }) + } - let intervalSetCorrectly - global.setInterval = (fn, interval) => { - expect(interval).to.equal(12345000) - intervalSetCorrectly = true - return setTimeout(fn, 1) } + telemetry = proxyquire('../../src/telemetry', { + '../exporters/common/docker': { + id () { + return 'test docker id' + } + }, + './send-data': sendDataError + }) telemetry.start({ - telemetry: { enabled: true, heartbeatInterval: 12345000 }, + telemetry: { enabled: true, heartbeatInterval: HEARTBEAT_INTERVAL }, hostname: 'localhost', - port: 8126, + port: 0, service: 'test service', version: '1.2.3-beta4', + appsec: { enabled: true }, + profiling: { enabled: true }, env: 'preprod', tags: { 'runtime-id': '1a2b3c' } }, { - _pluginsByName: {} + _pluginsByName: pluginsByName }) + pluginsByName.foo1 = { _enabled: true } + telemetry.updateIntegrations() // This sends an batch message and succeeds - process.nextTick(() => { - expect(intervalSetCorrectly).to.be.true - telemetry.stop() - done() + pluginsByName.zoo1 = { _enabled: true } + telemetry.updateIntegrations() + 
expect(capturedRequestType).to.equal('app-integrations-change') + + expect(capturedPayload).to.deep.equal({ + 'integrations': [{ + name: 'zoo1', + enabled: true, + auto_enabled: true + }] + }) + }) + + it('should updated batch request after previous fail', () => { + const sendDataError = { + sendData: (config, application, host, reqType, payload, cb = () => {}) => { + capturedRequestType = reqType + capturedPayload = payload + + // skipping startup command + if (reqType === 'app-started') { + cb() + return + } + + // Simulate an HTTP error by calling the callback with an error + cb(new Error('HTTP request error'), { + payload: payload, + reqType: reqType + }) + } + + } + telemetry = proxyquire('../../src/telemetry', { + '../exporters/common/docker': { + id () { + return 'test docker id' + } + }, + './send-data': sendDataError + }) + + // Start function sends 2 messages app-started & app-integrations-change + telemetry.start({ + telemetry: { enabled: true, heartbeatInterval: HEARTBEAT_INTERVAL }, + hostname: 'localhost', + port: 0, + service: 'test service', + version: '1.2.3-beta4', + appsec: { enabled: true }, + profiling: { enabled: true }, + env: 'preprod', + tags: { + 'runtime-id': '1a2b3c' + } + }, { + _pluginsByName: pluginsByName + }) + + pluginsByName.foo1 = { _enabled: true } + telemetry.updateIntegrations() // This sends an batch message and fails + + pluginsByName.zoo1 = { _enabled: true } + telemetry.updateIntegrations() + + expect(capturedRequestType).to.equal('message-batch') + expect(capturedPayload).to.deep.equal([{ + request_type: 'app-integrations-change', + payload: { + 'integrations': [{ + name: 'zoo1', + enabled: true, + auto_enabled: true + }] + } + + }, { + request_type: 'app-integrations-change', + payload: { + 'integrations': [{ + name: 'foo1', + enabled: true, + auto_enabled: true + }] + } + + }] + ) + }) + + it('should set extended heartbeat payload', async () => { + let extendedHeartbeatRequest + let extendedHeartbeatPayload + const sendDataError = { + sendData: (config, application, host, reqType, payload, cb = () => {}) => { + // skipping startup command + if (reqType === 'app-started') { + cb() + return + } + + if (reqType === 'app-extended-heartbeat') { + extendedHeartbeatRequest = reqType + extendedHeartbeatPayload = payload + return + } + + // Simulate an HTTP error by calling the callback with an error + cb(new Error('HTTP request error'), { + payload: payload, + reqType: reqType + }) + } + + } + telemetry = proxyquire('../../src/telemetry', { + '../exporters/common/docker': { + id () { + return 'test docker id' + } + }, + './send-data': sendDataError + }) + + // Start function sends 2 messages app-started & app-integrations-change + telemetry.start({ + telemetry: { enabled: true, heartbeatInterval: HEARTBEAT_INTERVAL }, + hostname: 'localhost', + port: 0, + service: 'test service', + version: '1.2.3-beta4', + appsec: { enabled: true }, + profiling: { enabled: true }, + env: 'preprod', + tags: { + 'runtime-id': '1a2b3c' + } + }, + { + _pluginsByName: pluginsByName + }) + pluginsByName.foo1 = { _enabled: true } + telemetry.updateIntegrations() // This sends an batch message and fails + // Skip forward a day + clock.tick(86400000) + expect(extendedHeartbeatRequest).to.equal('app-extended-heartbeat') + expect(extendedHeartbeatPayload).to.haveOwnProperty('integrations') + expect(extendedHeartbeatPayload['integrations']).to.deep.include({ + integrations: [ + { name: 'foo2', enabled: true, auto_enabled: true }, + { name: 'bar2', enabled: false, auto_enabled: 
true } + ] }) }) }) @@ -310,7 +715,7 @@ async function testSeq (seqId, reqType, validatePayload) { expect(req.url).to.equal(`/telemetry/proxy/api/v2/apmtelemetry`) expect(req.headers).to.include({ 'content-type': 'application/json', - 'dd-telemetry-api-version': 'v1', + 'dd-telemetry-api-version': 'v2', 'dd-telemetry-request-type': reqType }) const osName = os.type() @@ -336,7 +741,8 @@ async function testSeq (seqId, reqType, validatePayload) { } } expect(req.body).to.deep.include({ - api_version: 'v1', + api_version: 'v2', + naming_schema_version: '', request_type: reqType, runtime_id: '1a2b3c', seq_id: seqId, diff --git a/packages/dd-trace/test/telemetry/logs/index.spec.js b/packages/dd-trace/test/telemetry/logs/index.spec.js index 82a2d380122..1ed06d41976 100644 --- a/packages/dd-trace/test/telemetry/logs/index.spec.js +++ b/packages/dd-trace/test/telemetry/logs/index.spec.js @@ -165,7 +165,7 @@ describe('telemetry logs', () => { logs.send(defaultConfig, application, host) - expect(sendData).to.be.calledOnceWithExactly(defaultConfig, application, host, 'logs', collectedLogs) + expect(sendData).to.be.calledOnceWithExactly(defaultConfig, application, host, 'logs', { 'logs': collectedLogs }) }) it('should not drain logCollector and call sendData if not enabled', () => { diff --git a/packages/dd-trace/test/telemetry/send-data.spec.js b/packages/dd-trace/test/telemetry/send-data.spec.js index 0fce52bb3a2..6b5c9869fb7 100644 --- a/packages/dd-trace/test/telemetry/send-data.spec.js +++ b/packages/dd-trace/test/telemetry/send-data.spec.js @@ -2,6 +2,7 @@ require('../setup/tap') +const { expect } = require('chai') const proxyquire = require('proxyquire') describe('sendData', () => { const application = { @@ -33,7 +34,7 @@ describe('sendData', () => { path: '/telemetry/proxy/api/v2/apmtelemetry', headers: { 'content-type': 'application/json', - 'dd-telemetry-api-version': 'v1', + 'dd-telemetry-api-version': 'v2', 'dd-telemetry-request-type': 'req-type', 'dd-client-library-language': application.language_name, 'dd-client-library-version': application.tracer_version @@ -58,7 +59,7 @@ describe('sendData', () => { path: '/telemetry/proxy/api/v2/apmtelemetry', headers: { 'content-type': 'application/json', - 'dd-telemetry-api-version': 'v1', + 'dd-telemetry-api-version': 'v2', 'dd-telemetry-request-type': 'req-type', 'dd-client-library-language': application.language_name, 'dd-client-library-version': application.tracer_version @@ -84,7 +85,7 @@ describe('sendData', () => { path: '/telemetry/proxy/api/v2/apmtelemetry', headers: { 'content-type': 'application/json', - 'dd-telemetry-api-version': 'v1', + 'dd-telemetry-api-version': 'v2', 'dd-telemetry-request-type': 'req-type', 'dd-telemetry-debug-enabled': 'true', 'dd-client-library-language': application.language_name, @@ -112,13 +113,38 @@ describe('sendData', () => { expect(data.payload).to.deep.equal(trimmedPayload) }) - it('should not destructure a payload with array type', () => { - const arrayPayload = [{ message: 'test' }, { message: 'test2' }] - sendDataModule.sendData({ tags: { 'runtime-id': '123' } }, 'test', 'test', 'req-type', arrayPayload) + it('should send batch request with retryPayload', () => { + const retryObjData = { 'payload': { 'foo': 'bar' }, 'request_type': 'req-type-1' } + const payload = [{ + 'request_type': 'req-type-2', + 'payload': { + integrations: [ + { name: 'foo2', enabled: true, auto_enabled: true }, + { name: 'bar2', enabled: false, auto_enabled: true } + ] + } + + }, retryObjData] + + sendDataModule.sendData({ tags: 
{ 'runtime-id': '123' } }, + { 'language': 'js' }, 'test', 'message-batch', payload) / expect(request).to.have.been.calledOnce - const data = JSON.parse(request.getCall(0).args[0]) - expect(data.payload).to.deep.equal(arrayPayload) + const data = JSON.parse(request.getCall(0).args[0]) + const expectedPayload = [{ + 'request_type': 'req-type-2', + 'payload': { + integrations: [ + { name: 'foo2', enabled: true, auto_enabled: true }, + { name: 'bar2', enabled: false, auto_enabled: true } + ] + } + }, { + 'request_type': 'req-type-1', + 'payload': { 'foo': 'bar' } + }] + expect(data.request_type).to.equal('message-batch') + expect(data.payload).to.deep.equal(expectedPayload) }) }) From 9cd12b9775b64f808607a5e79078c1222e31c2da Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Tue, 12 Dec 2023 10:07:08 +0100 Subject: [PATCH 101/147] Emit address and port as separate labels; it's more compact like that and avoids string concatenation (#3859) --- integration-tests/profiler.spec.js | 7 +++++-- packages/dd-trace/src/profiling/profilers/events.js | 10 +++++++--- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/integration-tests/profiler.spec.js b/integration-tests/profiler.spec.js index 654d89a25ce..1f83671d730 100644 --- a/integration-tests/profiler.spec.js +++ b/integration-tests/profiler.spec.js @@ -200,12 +200,13 @@ describe('profiler', () => { const eventKey = strings.dedup('event') const hostKey = strings.dedup('host') const addressKey = strings.dedup('address') + const portKey = strings.dedup('port') const threadNameKey = strings.dedup('thread name') const nameKey = strings.dedup('operation') const dnsEventValue = strings.dedup('dns') const dnsEvents = [] for (const sample of prof.sample) { - let ts, event, host, address, name, threadName + let ts, event, host, address, port, name, threadName for (const label of sample.label) { switch (label.key) { case tsKey: ts = label.num; break @@ -213,6 +214,7 @@ describe('profiler', () => { case eventKey: event = label.str; break case hostKey: host = label.str; break case addressKey: address = label.str; break + case portKey: port = label.num; break case threadNameKey: threadName = label.str; break default: assert.fail(`Unexpected label key ${label.key} ${strings.strings[label.key]}`) } @@ -231,6 +233,7 @@ describe('profiler', () => { const ev = { name: strings.strings[name] } if (address) { ev.address = strings.strings[address] + ev.port = port } else { ev.host = strings.strings[host] } @@ -242,7 +245,7 @@ describe('profiler', () => { { name: 'lookup', host: 'example.com' }, { name: 'lookup', host: 'datadoghq.com' }, { name: 'queryA', host: 'datadoghq.com' }, - { name: 'lookupService', address: '13.224.103.60:80' } + { name: 'lookupService', address: '13.224.103.60', port: 80 } ]) }) diff --git a/packages/dd-trace/src/profiling/profilers/events.js b/packages/dd-trace/src/profiling/profilers/events.js index 417a91d80d4..7c6caa267f9 100644 --- a/packages/dd-trace/src/profiling/profilers/events.js +++ b/packages/dd-trace/src/profiling/profilers/events.js @@ -113,6 +113,7 @@ class DNSDecorator { this.operationNameLabelKey = stringTable.dedup('operation') this.hostLabelKey = stringTable.dedup('host') this.addressLabelKey = stringTable.dedup('address') + this.portLabelKey = stringTable.dedup('port') this.lanes = new Lanes(stringTable, `${threadNamePrefix} DNS`) } @@ -130,7 +131,8 @@ class DNSDecorator { addLabel(this.hostLabelKey, detail.hostname) break case 'lookupService': - addLabel(this.addressLabelKey, `${detail.host}:${detail.port}`) + 
addLabel(this.addressLabelKey, detail.host) + labels.push(new Label({ key: this.portLabelKey, num: detail.port })) break case 'getHostByAddr': addLabel(this.addressLabelKey, detail.host) @@ -148,7 +150,8 @@ class NetDecorator { constructor (stringTable) { this.stringTable = stringTable this.operationNameLabelKey = stringTable.dedup('operation') - this.addressLabelKey = stringTable.dedup('address') + this.hostLabelKey = stringTable.dedup('host') + this.portLabelKey = stringTable.dedup('port') this.lanes = new Lanes(stringTable, `${threadNamePrefix} Net`) } @@ -162,7 +165,8 @@ class NetDecorator { addLabel(this.operationNameLabelKey, op) if (op === 'connect') { const detail = item.detail - addLabel(this.addressLabelKey, `${detail.host}:${detail.port}`) + addLabel(this.stringTable, this.hostLabelKey, detail.host) + labels.push(new Label({ key: this.portLabelKey, num: detail.port })) } labels.push(this.lanes.getLabelFor(item)) } From 58472f850ba5c93741d0dae1274d44f529656cfe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Tue, 12 Dec 2023 10:29:24 +0100 Subject: [PATCH 102/147] [ci-visibility] Speed up git unshallow (#3839) --- .../exporters/git/git_metadata.js | 131 ++++++++++-------- packages/dd-trace/src/plugins/util/git.js | 4 +- .../exporters/git/git_metadata.spec.js | 28 +++- .../dd-trace/test/plugins/util/git.spec.js | 10 +- 4 files changed, 104 insertions(+), 69 deletions(-) diff --git a/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js b/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js index 7c5201566ff..2747b9746e7 100644 --- a/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js +++ b/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js @@ -10,7 +10,7 @@ const { getLatestCommits, getRepositoryUrl, generatePackFilesForCommits, - getCommitsToUpload, + getCommitsRevList, isShallowRepository, unshallowRepository } = require('../../../plugins/util/git') @@ -46,11 +46,7 @@ function getCommonRequestOptions (url) { * The response are the commits for which the backend already has information * This response is used to know which commits can be ignored from there on */ -function getCommitsToExclude ({ url, isEvpProxy, repositoryUrl }, callback) { - const latestCommits = getLatestCommits() - - log.debug(`There were ${latestCommits.length} commits since last month.`) - +function getCommitsToUpload ({ url, repositoryUrl, latestCommits, isEvpProxy }, callback) { const commonOptions = getCommonRequestOptions(url) const options = { @@ -83,13 +79,23 @@ function getCommitsToExclude ({ url, isEvpProxy, repositoryUrl }, callback) { const error = new Error(`Error fetching commits to exclude: ${err.message}`) return callback(error) } - let commitsToExclude + let alreadySeenCommits try { - commitsToExclude = validateCommits(JSON.parse(response).data) + alreadySeenCommits = validateCommits(JSON.parse(response).data) } catch (e) { return callback(new Error(`Can't parse commits to exclude response: ${e.message}`)) } - callback(null, commitsToExclude, latestCommits) + log.debug(`There are ${alreadySeenCommits.length} commits to exclude.`) + const commitsToInclude = latestCommits.filter((commit) => !alreadySeenCommits.includes(commit)) + log.debug(`There are ${commitsToInclude.length} commits to include.`) + + if (!commitsToInclude.length) { + return callback(null, []) + } + + const commitsToUpload = getCommitsRevList(alreadySeenCommits, commitsToInclude) + + callback(null, commitsToUpload) }) } @@ -150,6 
+156,53 @@ function uploadPackFile ({ url, isEvpProxy, packFileToUpload, repositoryUrl, hea }) } +function generateAndUploadPackFiles ({ + url, + isEvpProxy, + commitsToUpload, + repositoryUrl, + headCommit +}, callback) { + log.debug(`There are ${commitsToUpload.length} commits to upload`) + + const packFilesToUpload = generatePackFilesForCommits(commitsToUpload) + + log.debug(`Uploading ${packFilesToUpload.length} packfiles.`) + + if (!packFilesToUpload.length) { + return callback(new Error('Failed to generate packfiles')) + } + + let packFileIndex = 0 + // This uploads packfiles sequentially + const uploadPackFileCallback = (err) => { + if (err || packFileIndex === packFilesToUpload.length) { + return callback(err) + } + return uploadPackFile( + { + packFileToUpload: packFilesToUpload[packFileIndex++], + url, + isEvpProxy, + repositoryUrl, + headCommit + }, + uploadPackFileCallback + ) + } + + uploadPackFile( + { + packFileToUpload: packFilesToUpload[packFileIndex++], + url, + isEvpProxy, + repositoryUrl, + headCommit + }, + uploadPackFileCallback + ) +} + /** * This function uploads git metadata to CI Visibility's backend. */ @@ -165,65 +218,31 @@ function sendGitMetadata (url, isEvpProxy, configRepositoryUrl, callback) { return callback(new Error('Repository URL is empty')) } - if (isShallowRepository()) { - log.debug('It is shallow clone, unshallowing...') - unshallowRepository() - } + const latestCommits = getLatestCommits() + log.debug(`There were ${latestCommits.length} commits since last month.`) + const [headCommit] = latestCommits - getCommitsToExclude({ url, repositoryUrl, isEvpProxy }, (err, commitsToExclude, latestCommits) => { + const getOnFinishGetCommitsToUpload = (hasCheckedShallow) => (err, commitsToUpload) => { if (err) { return callback(err) } - log.debug(`There are ${commitsToExclude.length} commits to exclude.`) - const [headCommit] = latestCommits - const commitsToInclude = latestCommits.filter((commit) => !commitsToExclude.includes(commit)) - log.debug(`There are ${commitsToInclude.length} commits to include.`) - - const commitsToUpload = getCommitsToUpload(commitsToExclude, commitsToInclude) if (!commitsToUpload.length) { log.debug('No commits to upload') return callback(null) } - log.debug(`There are ${commitsToUpload.length} commits to upload`) - - const packFilesToUpload = generatePackFilesForCommits(commitsToUpload) - - log.debug(`Uploading ${packFilesToUpload.length} packfiles.`) - - if (!packFilesToUpload.length) { - return callback(new Error('Failed to generate packfiles')) - } - let packFileIndex = 0 - // This uploads packfiles sequentially - const uploadPackFileCallback = (err) => { - if (err || packFileIndex === packFilesToUpload.length) { - return callback(err) - } - return uploadPackFile( - { - packFileToUpload: packFilesToUpload[packFileIndex++], - url, - isEvpProxy, - repositoryUrl, - headCommit - }, - uploadPackFileCallback - ) + // If it has already unshallowed or the clone is not shallow, we move on + if (hasCheckedShallow || !isShallowRepository()) { + return generateAndUploadPackFiles({ url, isEvpProxy, commitsToUpload, repositoryUrl, headCommit }, callback) } + // Otherwise we unshallow and get commits to upload again + log.debug('It is shallow clone, unshallowing...') + unshallowRepository() + getCommitsToUpload({ url, repositoryUrl, latestCommits, isEvpProxy }, getOnFinishGetCommitsToUpload(true)) + } - uploadPackFile( - { - packFileToUpload: packFilesToUpload[packFileIndex++], - url, - isEvpProxy, - repositoryUrl, - headCommit - }, - 
uploadPackFileCallback - ) - }) + getCommitsToUpload({ url, repositoryUrl, latestCommits, isEvpProxy }, getOnFinishGetCommitsToUpload(false)) } module.exports = { diff --git a/packages/dd-trace/src/plugins/util/git.js b/packages/dd-trace/src/plugins/util/git.js index 3a640ff249b..72ca5db3b59 100644 --- a/packages/dd-trace/src/plugins/util/git.js +++ b/packages/dd-trace/src/plugins/util/git.js @@ -111,7 +111,7 @@ function getLatestCommits () { } } -function getCommitsToUpload (commitsToExclude, commitsToInclude) { +function getCommitsRevList (commitsToExclude, commitsToInclude) { const commitsToExcludeString = commitsToExclude.map(commit => `^${commit}`) try { @@ -236,7 +236,7 @@ module.exports = { getLatestCommits, getRepositoryUrl, generatePackFilesForCommits, - getCommitsToUpload, + getCommitsRevList, GIT_REV_LIST_MAX_BUFFER, isShallowRepository, unshallowRepository diff --git a/packages/dd-trace/test/ci-visibility/exporters/git/git_metadata.spec.js b/packages/dd-trace/test/ci-visibility/exporters/git/git_metadata.spec.js index f7cb6c5e358..d50e4a9cb66 100644 --- a/packages/dd-trace/test/ci-visibility/exporters/git/git_metadata.spec.js +++ b/packages/dd-trace/test/ci-visibility/exporters/git/git_metadata.spec.js @@ -23,7 +23,7 @@ describe('git_metadata', () => { let getLatestCommitsStub let getRepositoryUrlStub - let getCommitsToUploadStub + let getCommitsRevListStub let generatePackFilesForCommitsStub let isShallowRepositoryStub let unshallowRepositoryStub @@ -42,7 +42,7 @@ describe('git_metadata', () => { beforeEach(() => { getLatestCommitsStub = sinon.stub().returns(latestCommits) - getCommitsToUploadStub = sinon.stub().returns(latestCommits) + getCommitsRevListStub = sinon.stub().returns(latestCommits) getRepositoryUrlStub = sinon.stub().returns('git@github.com:DataDog/dd-trace-js.git') isShallowRepositoryStub = sinon.stub().returns(false) unshallowRepositoryStub = sinon.stub() @@ -54,7 +54,7 @@ describe('git_metadata', () => { getLatestCommits: getLatestCommitsStub, getRepositoryUrl: getRepositoryUrlStub, generatePackFilesForCommits: generatePackFilesForCommitsStub, - getCommitsToUpload: getCommitsToUploadStub, + getCommitsRevList: getCommitsRevListStub, isShallowRepository: isShallowRepositoryStub, unshallowRepository: unshallowRepositoryStub } @@ -65,10 +65,26 @@ describe('git_metadata', () => { nock.cleanAll() }) - it('should unshallow if the repo is shallow', (done) => { + it('does not unshallow if every commit is already in backend', (done) => { + const scope = nock('https://api.test.com') + .post('/api/v2/git/repository/search_commits') + .reply(200, JSON.stringify({ data: latestCommits.map((sha) => ({ id: sha, type: 'commit' })) })) + + isShallowRepositoryStub.returns(true) + gitMetadata.sendGitMetadata(new URL('https://api.test.com'), false, '', (err) => { + expect(unshallowRepositoryStub).not.to.have.been.called + expect(err).to.be.null + expect(scope.isDone()).to.be.true + done() + }) + }) + + it('should unshallow if the repo is shallow and not every commit is in the backend', (done) => { const scope = nock('https://api.test.com') .post('/api/v2/git/repository/search_commits') .reply(200, JSON.stringify({ data: [] })) + .post('/api/v2/git/repository/search_commits') // calls a second time after unshallowing + .reply(200, JSON.stringify({ data: [] })) .post('/api/v2/git/repository/packfile') .reply(204) @@ -102,7 +118,7 @@ describe('git_metadata', () => { .post('/api/v2/git/repository/packfile') .reply(204) - getCommitsToUploadStub.returns([]) + 
getCommitsRevListStub.returns([]) gitMetadata.sendGitMetadata(new URL('https://api.test.com'), false, '', (err) => { expect(err).to.be.null @@ -165,7 +181,7 @@ describe('git_metadata', () => { it('should fail if the packfile request returns anything other than 204', (done) => { const scope = nock('https://api.test.com') .post('/api/v2/git/repository/search_commits') - .reply(200, JSON.stringify({ data: latestCommits.map((sha) => ({ id: sha, type: 'commit' })) })) + .reply(200, JSON.stringify({ data: [] })) .post('/api/v2/git/repository/packfile') .reply(502) diff --git a/packages/dd-trace/test/plugins/util/git.spec.js b/packages/dd-trace/test/plugins/util/git.spec.js index 90553564f98..c04bd371dbb 100644 --- a/packages/dd-trace/test/plugins/util/git.spec.js +++ b/packages/dd-trace/test/plugins/util/git.spec.js @@ -142,11 +142,11 @@ describe('git', () => { }) }) -describe('getCommitsToUpload', () => { +describe('getCommitsRevList', () => { it('gets the commits to upload if the repository is smaller than the limit', () => { const logErrorSpy = sinon.spy() - const { getCommitsToUpload } = proxyquire('../../../src/plugins/util/git', + const { getCommitsRevList } = proxyquire('../../../src/plugins/util/git', { 'child_process': { 'execFileSync': (command, flags, options) => @@ -157,14 +157,14 @@ describe('getCommitsToUpload', () => { } } ) - getCommitsToUpload([], []) + getCommitsRevList([], []) expect(logErrorSpy).not.to.have.been.called }) it('does not crash and logs the error if the repository is bigger than the limit', () => { const logErrorSpy = sinon.spy() - const { getCommitsToUpload } = proxyquire('../../../src/plugins/util/git', + const { getCommitsRevList } = proxyquire('../../../src/plugins/util/git', { 'child_process': { 'execFileSync': (command, flags, options) => @@ -175,7 +175,7 @@ describe('getCommitsToUpload', () => { } } ) - const commitsToUpload = getCommitsToUpload([], []) + const commitsToUpload = getCommitsRevList([], []) expect(logErrorSpy).to.have.been.called expect(commitsToUpload.length).to.equal(0) }) From 72db78b52ed83f449d7d605aa5e0b28f4c5bb0b1 Mon Sep 17 00:00:00 2001 From: Ugaitz Urien Date: Tue, 12 Dec 2023 16:39:00 +0100 Subject: [PATCH 103/147] Update rules version to v1.10.0 (#3857) --- packages/dd-trace/src/appsec/recommended.json | 1397 ++++++++++++++++- packages/dd-trace/src/appsec/reporter.js | 6 +- .../dd-trace/test/appsec/reporter.spec.js | 5 +- 3 files changed, 1400 insertions(+), 8 deletions(-) diff --git a/packages/dd-trace/src/appsec/recommended.json b/packages/dd-trace/src/appsec/recommended.json index fc316459b63..d572c003911 100644 --- a/packages/dd-trace/src/appsec/recommended.json +++ b/packages/dd-trace/src/appsec/recommended.json @@ -1,7 +1,7 @@ { "version": "2.2", "metadata": { - "rules_version": "1.9.0" + "rules_version": "1.10.0" }, "rules": [ { @@ -118,6 +118,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "list": [ @@ -346,6 +349,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "list": [ @@ -1839,6 +1845,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "^(?i:file|ftps?)://.*?\\?+$", @@ -1881,6 +1890,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "list": [ @@ -2391,6 +2403,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "^\\(\\s*\\)\\s+{", @@ 
-2547,6 +2562,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "list": [ @@ -2608,6 +2626,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "(?:HTTP_(?:ACCEPT(?:_(?:ENCODING|LANGUAGE|CHARSET))?|(?:X_FORWARDED_FO|REFERE)R|(?:USER_AGEN|HOS)T|CONNECTION|KEEP_ALIVE)|PATH_(?:TRANSLATED|INFO)|ORIG_PATH_INFO|QUERY_STRING|REQUEST_URI|AUTH_TYPE)", @@ -2650,6 +2671,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "php://(?:std(?:in|out|err)|(?:in|out)put|fd|memory|temp|filter)", @@ -2691,6 +2715,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "list": [ @@ -2775,6 +2802,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "\\b(?:s(?:e(?:t(?:_(?:e(?:xception|rror)_handler|magic_quotes_runtime|include_path)|defaultstub)|ssion_s(?:et_save_handler|tart))|qlite_(?:(?:(?:unbuffered|single|array)_)?query|create_(?:aggregate|function)|p?open|exec)|tr(?:eam_(?:context_create|socket_client)|ipc?slashes|rev)|implexml_load_(?:string|file)|ocket_c(?:onnect|reate)|h(?:ow_sourc|a1_fil)e|pl_autoload_register|ystem)|p(?:r(?:eg_(?:replace(?:_callback(?:_array)?)?|match(?:_all)?|split)|oc_(?:(?:terminat|clos|nic)e|get_status|open)|int_r)|o(?:six_(?:get(?:(?:e[gu]|g)id|login|pwnam)|mk(?:fifo|nod)|ttyname|kill)|pen)|hp(?:_(?:strip_whitespac|unam)e|version|info)|g_(?:(?:execut|prepar)e|connect|query)|a(?:rse_(?:ini_file|str)|ssthru)|utenv)|r(?:unkit_(?:function_(?:re(?:defin|nam)e|copy|add)|method_(?:re(?:defin|nam)e|copy|add)|constant_(?:redefine|add))|e(?:(?:gister_(?:shutdown|tick)|name)_function|ad(?:(?:gz)?file|_exif_data|dir))|awurl(?:de|en)code)|i(?:mage(?:createfrom(?:(?:jpe|pn)g|x[bp]m|wbmp|gif)|(?:jpe|pn)g|g(?:d2?|if)|2?wbmp|xbm)|s_(?:(?:(?:execut|write?|read)ab|fi)le|dir)|ni_(?:get(?:_all)?|set)|terator_apply|ptcembed)|g(?:et(?:_(?:c(?:urrent_use|fg_va)r|meta_tags)|my(?:[gpu]id|inode)|(?:lastmo|cw)d|imagesize|env)|z(?:(?:(?:defla|wri)t|encod|fil)e|compress|open|read)|lob)|a(?:rray_(?:u(?:intersect(?:_u?assoc)?|diff(?:_u?assoc)?)|intersect_u(?:assoc|key)|diff_u(?:assoc|key)|filter|reduce|map)|ssert(?:_options)?|tob)|h(?:tml(?:specialchars(?:_decode)?|_entity_decode|entities)|(?:ash(?:_(?:update|hmac))?|ighlight)_file|e(?:ader_register_callback|x2bin))|f(?:i(?:le(?:(?:[acm]tim|inod)e|(?:_exist|perm)s|group)?|nfo_open)|tp_(?:nb_(?:ge|pu)|connec|ge|pu)t|(?:unction_exis|pu)ts|write|open)|o(?:b_(?:get_(?:c(?:ontents|lean)|flush)|end_(?:clean|flush)|clean|flush|start)|dbc_(?:result(?:_all)?|exec(?:ute)?|connect)|pendir)|m(?:b_(?:ereg(?:_(?:replace(?:_callback)?|match)|i(?:_replace)?)?|parse_str)|(?:ove_uploaded|d5)_file|ethod_exists|ysql_query|kdir)|e(?:x(?:if_(?:t(?:humbnail|agname)|imagetype|read_data)|ec)|scapeshell(?:arg|cmd)|rror_reporting|val)|c(?:url_(?:file_create|exec|init)|onvert_uuencode|reate_function|hr)|u(?:n(?:serialize|pack)|rl(?:de|en)code|[ak]?sort)|b(?:(?:son_(?:de|en)|ase64_en)code|zopen|toa)|(?:json_(?:de|en)cod|debug_backtrac|tmpfil)e|var_dump)(?:\\s|/\\*.*\\*/|//.*|#.*|\\\"|')*\\((?:(?:\\s|/\\*.*\\*/|//.*|#.*)*(?:\\$\\w+|[A-Z\\d]\\w*|\\w+\\(.*\\)|\\\\?\"(?:[^\"]|\\\\\"|\"\"|\"\\+\")*\\\\?\"|\\\\?'(?:[^']|''|'\\+')*\\\\?')(?:\\s|/\\*.*\\*/|//.*|#.*)*(?:(?:::|\\.|->)(?:\\s|/\\*.*\\*/|//.*|#.*)*\\w+(?:\\(.*\\))?)?,)*(?:(?:\\s|/\\*.*\\*/|//.*|#.*)*(?:\\$\\w+|[A-Z\\d]\\w*|\\w+\\(.*\\)|\\\\?\"(?:[^\"]|\\\
\\"|\"\"|\"\\+\")*\\\\?\"|\\\\?'(?:[^']|''|'\\+')*\\\\?')(?:\\s|/\\*.*\\*/|//.*|#.*)*(?:(?:::|\\.|->)(?:\\s|/\\*.*\\*/|//.*|#.*)*\\w+(?:\\(.*\\))?)?)?\\)", @@ -2820,6 +2850,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "[oOcC]:\\d+:\\\".+?\\\":\\d+:{[\\W\\w]*}", @@ -2861,6 +2894,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "(?:(?:bzip|ssh)2|z(?:lib|ip)|(?:ph|r)ar|expect|glob|ogg)://", @@ -2904,6 +2940,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "\\b(?:(?:l(?:(?:utimes|chmod)(?:Sync)?|(?:stat|ink)Sync)|w(?:rite(?:(?:File|v)(?:Sync)?|Sync)|atchFile)|u(?:n(?:watchFile|linkSync)|times(?:Sync)?)|s(?:(?:ymlink|tat)Sync|pawn(?:File|Sync))|ex(?:ec(?:File(?:Sync)?|Sync)|istsSync)|a(?:ppendFile|ccess)(?:Sync)?|(?:Caveat|Inode)s|open(?:dir)?Sync|new\\s+Function|Availability|\\beval)\\s*\\(|m(?:ain(?:Module\\s*(?:\\W*\\s*(?:constructor|require)|\\[)|\\s*(?:\\W*\\s*(?:constructor|require)|\\[))|kd(?:temp(?:Sync)?|irSync)\\s*\\(|odule\\.exports\\s*=)|c(?:(?:(?:h(?:mod|own)|lose)Sync|reate(?:Write|Read)Stream|p(?:Sync)?)\\s*\\(|o(?:nstructor\\s*(?:\\W*\\s*_load|\\[)|pyFile(?:Sync)?\\s*\\())|f(?:(?:(?:s(?:(?:yncS)?|tatS)|datas(?:yncS)?)ync|ch(?:mod|own)(?:Sync)?)\\s*\\(|u(?:nction\\s*\\(\\s*\\)\\s*{|times(?:Sync)?\\s*\\())|r(?:e(?:(?:ad(?:(?:File|link|dir)?Sync|v(?:Sync)?)|nameSync)\\s*\\(|quire\\s*(?:\\W*\\s*main|\\[))|m(?:Sync)?\\s*\\()|process\\s*(?:\\W*\\s*(?:mainModule|binding)|\\[)|t(?:his\\.constructor|runcateSync\\s*\\()|_(?:\\$\\$ND_FUNC\\$\\$_|_js_function)|global\\s*(?:\\W*\\s*process|\\[)|String\\s*\\.\\s*fromCharCode|binding\\s*\\[)", @@ -2946,6 +2985,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "\\b(?:w(?:atch|rite)|(?:spaw|ope)n|exists|close|fork|read)\\s*\\(", @@ -3000,6 +3042,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "]*>[\\s\\S]*?", @@ -3057,6 +3102,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "\\bon(?:d(?:r(?:ag(?:en(?:ter|d)|leave|start|over)?|op)|urationchange|blclick)|s(?:e(?:ek(?:ing|ed)|arch|lect)|u(?:spend|bmit)|talled|croll|how)|m(?:ouse(?:(?:lea|mo)ve|o(?:ver|ut)|enter|down|up)|essage)|p(?:a(?:ge(?:hide|show)|(?:st|us)e)|lay(?:ing)?|rogress|aste|ointer(?:cancel|down|enter|leave|move|out|over|rawupdate|up))|c(?:anplay(?:through)?|o(?:ntextmenu|py)|hange|lick|ut)|a(?:nimation(?:iteration|start|end)|(?:fterprin|bor)t|uxclick|fterscriptexecute)|t(?:o(?:uch(?:cancel|start|move|end)|ggle)|imeupdate)|f(?:ullscreen(?:change|error)|ocus(?:out|in)?|inish)|(?:(?:volume|hash)chang|o(?:ff|n)lin)e|b(?:efore(?:unload|print)|lur)|load(?:ed(?:meta)?data|start|end)?|r(?:es(?:ize|et)|atechange)|key(?:press|down|up)|w(?:aiting|heel)|in(?:valid|put)|e(?:nded|rror)|unload)[\\s\\x0B\\x09\\x0C\\x3B\\x2C\\x28\\x3B]*?=[^=]", @@ -3113,6 +3161,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "[a-z]+=(?:[^:=]+:.+;)*?[^:=]+:url\\(javascript", @@ -3169,6 +3220,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": 
"(?:\\W|^)(?:javascript:(?:[\\s\\S]+[=\\x5c\\(\\[\\.<]|[\\s\\S]*?(?:\\bname\\b|\\x5c[ux]\\d)))|@\\W*?i\\W*?m\\W*?p\\W*?o\\W*?r\\W*?t\\W*?(?:/\\*[\\s\\S]*?)?(?:[\\\"']|\\W*?u\\W*?r\\W*?l[\\s\\S]*?\\()|[^-]*?-\\W*?m\\W*?o\\W*?z\\W*?-\\W*?b\\W*?i\\W*?n\\W*?d\\W*?i\\W*?n\\W*?g[^:]*?:\\W*?u\\W*?r\\W*?l[\\s\\S]*?\\(", @@ -3212,6 +3266,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "list": [ @@ -3260,6 +3317,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "(?i:<.*[:]?vmlframe.*?[\\s/+]*?src[\\s/+]*=)", @@ -3304,6 +3364,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "(?i:(?:j|&#x?0*(?:74|4A|106|6A);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:a|&#x?0*(?:65|41|97|61);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:v|&#x?0*(?:86|56|118|76);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:a|&#x?0*(?:65|41|97|61);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:s|&#x?0*(?:83|53|115|73);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:c|&#x?0*(?:67|43|99|63);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:r|&#x?0*(?:82|52|114|72);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:i|&#x?0*(?:73|49|105|69);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:p|&#x?0*(?:80|50|112|70);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:t|&#x?0*(?:84|54|116|74);?)(?:\\t|\\n|\\r|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?::|&(?:#x?0*(?:58|3A);?|colon;)).)", @@ -3348,6 +3411,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "(?i:(?:v|&#x?0*(?:86|56|118|76);?)(?:\\t|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:b|&#x?0*(?:66|42|98|62);?)(?:\\t|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:s|&#x?0*(?:83|53|115|73);?)(?:\\t|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:c|&#x?0*(?:67|43|99|63);?)(?:\\t|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:r|&#x?0*(?:82|52|114|72);?)(?:\\t|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:i|&#x?0*(?:73|49|105|69);?)(?:\\t|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:p|&#x?0*(?:80|50|112|70);?)(?:\\t|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?:t|&#x?0*(?:84|54|116|74);?)(?:\\t|&(?:#x?0*(?:9|13|10|A|D);?|tab;|newline;))*(?::|&(?:#x?0*(?:58|3A);?|colon;)).)", @@ -3392,6 +3458,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "]", @@ -3608,6 +3689,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": ")|<.*\\+AD4-", @@ -3692,6 +3779,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "![!+ ]\\[\\]", @@ -3734,6 +3824,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "\\b(?i:eval|settimeout|setinterval|new\\s+Function|alert|prompt)[\\s+]*\\([^\\)]", @@ -3775,6 +3868,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ] }, @@ -3814,6 +3910,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "(?i:sleep\\(\\s*?\\d*?\\s*?\\)|benchmark\\(.*?\\,.*?\\))", @@ -3856,6 +3955,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": 
"graphql.server.resolver" } ], "regex": "(?:[\\\"'`](?:;*?\\s*?waitfor\\s+(?:delay|time)\\s+[\\\"'`]|;.*?:\\s*?goto)|alter\\s*?\\w+.*?cha(?:racte)?r\\s+set\\s+\\w+)", @@ -3896,6 +3998,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "(?i:merge.*?using\\s*?\\(|execute\\s*?immediate\\s*?[\\\"'`]|match\\s*?[\\w(?:),+-]+\\s*?against\\s*?\\()", @@ -3937,6 +4042,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "union.*?select.*?from", @@ -3978,6 +4086,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "(?:;\\s*?shutdown\\s*?(?:[#;{]|\\/\\*|--)|waitfor\\s*?delay\\s?[\\\"'`]+\\s?\\d|select\\s*?pg_sleep)", @@ -4018,6 +4129,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "(?i:(?:\\[?\\$(?:(?:s(?:lic|iz)|wher)e|e(?:lemMatch|xists|q)|n(?:o[rt]|in?|e)|l(?:ike|te?)|t(?:ext|ype)|a(?:ll|nd)|jsonSchema|between|regex|x?or|div|mod)\\]?)\\b)", @@ -4061,6 +4175,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "(?:^[\\W\\d]+\\s*?(?:alter\\s*(?:a(?:(?:pplication\\s*rol|ggregat)e|s(?:ymmetric\\s*ke|sembl)y|u(?:thorization|dit)|vailability\\s*group)|c(?:r(?:yptographic\\s*provider|edential)|o(?:l(?:latio|um)|nversio)n|ertificate|luster)|s(?:e(?:rv(?:ice|er)|curity|quence|ssion|arch)|y(?:mmetric\\s*key|nonym)|togroup|chema)|m(?:a(?:s(?:ter\\s*key|k)|terialized)|e(?:ssage\\s*type|thod)|odule)|l(?:o(?:g(?:file\\s*group|in)|ckdown)|a(?:ngua|r)ge|ibrary)|t(?:(?:abl(?:espac)?|yp)e|r(?:igger|usted)|hreshold|ext)|p(?:a(?:rtition|ckage)|ro(?:cedur|fil)e|ermission)|d(?:i(?:mension|skgroup)|atabase|efault|omain)|r(?:o(?:l(?:lback|e)|ute)|e(?:sourc|mot)e)|f(?:u(?:lltext|nction)|lashback|oreign)|e(?:xte(?:nsion|rnal)|(?:ndpoi|ve)nt)|in(?:dex(?:type)?|memory|stance)|b(?:roker\\s*priority|ufferpool)|x(?:ml\\s*schema|srobject)|w(?:ork(?:load)?|rapper)|hi(?:erarchy|stogram)|o(?:perator|utline)|(?:nicknam|queu)e|us(?:age|er)|group|java|view)|union\\s*(?:(?:distin|sele)ct|all))\\b|\\b(?:(?:(?:trunc|cre|upd)at|renam)e|(?:inser|selec)t|de(?:lete|sc)|alter|load)\\s+(?:group_concat|load_file|char)\\b\\s*\\(?|[\\s(]load_file\\s*?\\(|[\\\"'`]\\s+regexp\\W)", @@ -4101,6 +4218,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "(?i:/\\*[!+](?:[\\w\\s=_\\-(?:)]+)?\\*/)", @@ -4143,6 +4263,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "(?i:\\.cookie\\b.*?;\\W*?(?:expires|domain)\\W*?=|\\bhttp-equiv\\W+set-cookie\\b)", @@ -4188,6 +4311,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "java\\.lang\\.(?:runtime|processbuilder)", @@ -4233,6 +4359,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "(?:unmarshaller|base64data|java\\.).*(?:runtime|processbuilder)", @@ -4277,6 +4406,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "list": [ @@ -4312,6 +4444,7 @@ "java.lang.object", "java.lang.process", "java.lang.reflect", + "java.lang.runtime", "java.lang.string", "java.lang.stringbuilder", "java.lang.system", @@ -4362,6 +4495,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": 
"graphql.server.resolver" } ], "regex": "(?:class\\.module\\.classLoader\\.resources\\.context\\.parent\\.pipeline|springframework\\.context\\.support\\.FileSystemXmlApplicationContext)", @@ -4403,6 +4539,9 @@ { "address": "graphql.server.all_resolvers" }, + { + "address": "graphql.server.resolver" + }, { "address": "server.request.headers.no_cookies" } @@ -4448,6 +4587,9 @@ { "address": "graphql.server.all_resolvers" }, + { + "address": "graphql.server.resolver" + }, { "address": "server.request.headers.no_cookies" } @@ -4493,6 +4635,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "[@#]ognl", @@ -4639,6 +4784,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "#(?:set|foreach|macro|parse|if)\\(.*\\)|<#assign.*>" @@ -4680,6 +4828,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "\\b(?:burpcollaborator\\.net|oastify\\.com)\\b" @@ -4721,6 +4872,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "\\bqualysperiscope\\.com\\b|\\.oscomm\\." @@ -4762,6 +4916,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "\\bprbly\\.win\\b" @@ -4802,6 +4959,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "\\b(?:webhook\\.site|\\.canarytokens\\.com|vii\\.one|act1on3\\.ru|gdsburp\\.com|arcticwolf\\.net|oob\\.li|htbiw\\.com|h4\\.vc|mochan\\.cloud|imshopping\\.com|bootstrapnodejs\\.com|mooo-ng\\.com|securitytrails\\.com|canyouhackit\\.io|7bae\\.xyz)\\b" @@ -4842,6 +5002,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "\\b(?:\\.ngrok\\.io|requestbin\\.com|requestbin\\.net)\\b" @@ -4883,6 +5046,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "\\bappspidered\\.rapid7\\." 
@@ -4924,6 +5090,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "\\b(?:interact\\.sh|oast\\.(?:pro|live|site|online|fun|me)|indusfacefinder\\.in|where\\.land|syhunt\\.net|tssrt\\.de|boardofcyber\\.io|assetnote-callback\\.com|praetorianlabs\\.dev|netspi\\.sh)\\b" @@ -4965,6 +5134,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "\\b(?:\\.|(?:\\\\|&#)(?:0*46|x0*2e);)?r87(?:\\.|(?:\\\\|&#)(?:0*46|x0*2e);)(?:me|com)\\b", @@ -5010,6 +5182,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "\\bwhsec(?:\\.|(?:\\\\|&#)(?:0*46|x0*2e);)us\\b", @@ -5055,6 +5230,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "\\b\\.nessus\\.org\\b", @@ -5100,6 +5278,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "\\bwatchtowr\\.com\\b", @@ -5145,6 +5326,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "\\bptst\\.io\\b", @@ -5186,6 +5370,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "^(?i:file|ftps?|https?).*/rfiinc\\.txt\\?+$", @@ -5230,6 +5417,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "(?:(?:['\"\\x60({|;&]|(?:^|['\"\\x60({|;&])(?:cmd(?:\\.exe)?\\s+(?:/\\w(?::\\w+)?\\s+)*))(?:ping|curl|wget|telnet)|\\bnslookup)[\\s,]", @@ -5265,6 +5455,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "(?:<\\?xml[^>]*>.*)]+SYSTEM\\s+[^>]+>", @@ -5318,6 +5511,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "<(?:iframe|esi:include)(?:(?:\\s|/)*\\w+=[\"'\\w]+)*(?:\\s|/)*src(?:doc)?=[\"']?(?:data:|javascript:|http:|dns:|//)[^\\s'\"]+['\"]?", @@ -5364,6 +5560,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "https?:\\/\\/(?:.*\\.)?(?:bxss\\.(?:in|me)|xss\\.ht|js\\.rip)", @@ -5949,6 +6148,48 @@ ], "transformers": [] }, + { + "id": "nfd-000-010", + "name": "Detect failed attempts to find API documentation", + "tags": { + "type": "security_scanner", + "category": "attack_attempt", + "cwe": "200", + "capec": "1000/118/169", + "confidence": "0" + }, + "conditions": [ + { + "operator": "match_regex", + "parameters": { + "inputs": [ + { + "address": "server.response.status" + } + ], + "regex": "^404$", + "options": { + "case_sensitive": true + } + } + }, + { + "operator": "match_regex", + "parameters": { + "inputs": [ + { + "address": "server.request.uri.raw" + } + ], + "regex": "(?:/swagger\\b|/api[-/]docs?\\b)", + "options": { + "case_sensitive": false + } + } + } + ], + "transformers": [] + }, { "id": "sqr-000-001", "name": "SSRF: Try to access the credential manager of the main cloud services", @@ -5977,6 +6218,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "(?i)^\\W*((http|ftp)s?://)?\\W*((::f{4}:)?(169|(0x)?0*a9|0+251)\\.?(254|(0x)?0*fe|0+376)[0-9a-fx\\.:]+|metadata\\.google\\.internal|metadata\\.goog)\\W*/", @@ -6018,6 +6262,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": 
"require\\(['\"][\\w\\.]+['\"]\\)|process\\.\\w+\\([\\w\\.]*\\)|\\.toString\\(\\)", @@ -6063,6 +6310,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "(?i)[&|]\\s*type\\s+%\\w+%\\\\+\\w+\\.ini\\s*[&|]" @@ -6103,6 +6353,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "(?i)[&|]\\s*cat\\s*\\/etc\\/[\\w\\.\\/]*passwd\\s*[&|]" @@ -6145,6 +6398,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "(?i)[&|]\\s*timeout\\s+/t\\s+\\d+\\s*[&|]" @@ -6182,6 +6438,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "http(s?):\\/\\/([A-Za-z0-9\\.\\-\\_]+|\\[[A-Fa-f0-9\\:]+\\]|):5986\\/wsman", @@ -6222,6 +6481,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "^(jar:)?(http|https):\\/\\/([0-9oq]{1,5}\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}|[0-9]{1,10})(:[0-9]{1,5})?(\\/[^:@]*)?$" @@ -6261,6 +6523,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "^(jar:)?(http|https):\\/\\/((\\[)?[:0-9a-f\\.x]{2,}(\\])?)(:[0-9]{1,5})?(\\/[^:@]*)?$" @@ -6303,6 +6568,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "(http|https):\\/\\/(?:.*\\.)?(?:burpcollaborator\\.net|localtest\\.me|mail\\.ebc\\.apple\\.com|bugbounty\\.dod\\.network|.*\\.[nx]ip\\.io|oastify\\.com|oast\\.(?:pro|live|site|online|fun|me)|sslip\\.io|requestbin\\.com|requestbin\\.net|hookbin\\.com|webhook\\.site|canarytokens\\.com|interact\\.sh|ngrok\\.io|bugbounty\\.click|prbly\\.win|qualysperiscope\\.com|vii\\.one|act1on3\\.ru)" @@ -6343,6 +6611,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "^(jar:)?((file|netdoc):\\/\\/[\\\\\\/]+|(dict|gopher|ldap|sftp|tftp):\\/\\/.*:[0-9]{1,5})" @@ -6388,6 +6659,9 @@ }, { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "regex": "\\${[^j]*j[^n]*n[^d]*d[^i]*i[^:]*:[^}]*}" @@ -7923,5 +8197,1124 @@ ], "transformers": [] } + ], + "processors": [ + { + "id": "extract-content", + "generator": "extract_schema", + "conditions": [ + { + "operator": "equals", + "parameters": { + "inputs": [ + { + "address": "waf.context.processor", + "key_path": [ + "extract-schema" + ] + } + ], + "type": "boolean", + "value": true + } + } + ], + "parameters": { + "mappings": [ + { + "inputs": [ + { + "address": "server.request.body" + } + ], + "output": "_dd.appsec.s.req.body" + }, + { + "inputs": [ + { + "address": "server.request.cookies" + } + ], + "output": "_dd.appsec.s.req.cookies" + }, + { + "inputs": [ + { + "address": "server.request.query" + } + ], + "output": "_dd.appsec.s.req.query" + }, + { + "inputs": [ + { + "address": "server.request.path_params" + } + ], + "output": "_dd.appsec.s.req.params" + }, + { + "inputs": [ + { + "address": "server.response.body" + } + ], + "output": "_dd.appsec.s.res.body" + }, + { + "inputs": [ + { + "address": "graphql.server.all_resolvers" + } + ], + "output": "_dd.appsec.s.graphql.all_resolvers" + }, + { + "inputs": [ + { + "address": "graphql.server.resolver" + } + ], + "output": "_dd.appsec.s.graphql.resolver" + } + ], + "scanners": [ + { + "tags": { + "category": "payment" + } + }, + { + "tags": { + "category": "pii" + } + } + ] + }, + "evaluate": 
false, + "output": true + }, + { + "id": "extract-headers", + "generator": "extract_schema", + "conditions": [ + { + "operator": "equals", + "parameters": { + "inputs": [ + { + "address": "waf.context.processor", + "key_path": [ + "extract-schema" + ] + } + ], + "type": "boolean", + "value": true + } + } + ], + "parameters": { + "mappings": [ + { + "inputs": [ + { + "address": "server.request.headers.no_cookies" + } + ], + "output": "_dd.appsec.s.req.headers" + }, + { + "inputs": [ + { + "address": "server.response.headers.no_cookies" + } + ], + "output": "_dd.appsec.s.res.headers" + } + ], + "scanners": [ + { + "tags": { + "category": "credentials" + } + }, + { + "tags": { + "category": "pii" + } + } + ] + }, + "evaluate": false, + "output": true + } + ], + "scanners": [ + { + "id": "JU1sRk3mSzqSUJn6GrVn7g", + "name": "American Express Card Scanner (4+4+4+3 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b3[47]\\d{2}(?:(?:\\s\\d{4}\\s\\d{4}\\s\\d{3})|(?:\\,\\d{4}\\,\\d{4}\\,\\d{3})|(?:-\\d{4}-\\d{4}-\\d{3})|(?:\\.\\d{4}\\.\\d{4}\\.\\d{3}))\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "amex", + "category": "payment" + } + }, + { + "id": "edmH513UTQWcRiQ9UnzHlw-mod", + "name": "American Express Card Scanner (4+6|5+5|6 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b3[47]\\d{2}(?:(?:\\s\\d{5,6}\\s\\d{5,6})|(?:\\.\\d{5,6}\\.\\d{5,6})|(?:-\\d{5,6}-\\d{5,6})|(?:,\\d{5,6},\\d{5,6}))\\b", + "options": { + "case_sensitive": false, + "min_length": 17 + } + } + }, + "tags": { + "type": "card", + "card_type": "amex", + "category": "payment" + } + }, + { + "id": "e6K4h_7qTLaMiAbaNXoSZA", + "name": "American Express Card Scanner (8+7 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b3[47]\\d{6}(?:(?:\\s\\d{7})|(?:\\,\\d{7})|(?:-\\d{7})|(?:\\.\\d{7}))\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "amex", + "category": "payment" + } + }, + { + "id": "K2rZflWzRhGM9HiTc6whyQ", + "name": "American Express Card Scanner (1x15 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b3[47]\\d{13}\\b", + "options": { + "case_sensitive": false, + "min_length": 15 + } + } + }, + "tags": { + "type": "card", + "card_type": "amex", + "category": "payment" + } + }, + { + "id": "9d7756e343cefa22a5c098e1092590f806eb5446", + "name": "Basic Authentication Scanner", + "key": { + 
"operator": "match_regex", + "parameters": { + "regex": "\\bauthorization\\b", + "options": { + "case_sensitive": false, + "min_length": 13 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "^basic\\s+[A-Za-z0-9+/=]+", + "options": { + "case_sensitive": false, + "min_length": 7 + } + } + }, + "tags": { + "type": "basic_auth", + "category": "credentials" + } + }, + { + "id": "mZy8XjZLReC9smpERXWnnw", + "name": "Bearer Authentication Scanner", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\bauthorization\\b", + "options": { + "case_sensitive": false, + "min_length": 13 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "^bearer\\s+[-a-z0-9._~+/]{4,}", + "options": { + "case_sensitive": false, + "min_length": 11 + } + } + }, + "tags": { + "type": "bearer_token", + "category": "credentials" + } + }, + { + "id": "450239afc250a19799b6c03dc0e16fd6a4b2a1af", + "name": "Canadian Social Insurance Number Scanner", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:social[\\s_]?(?:insurance(?:\\s+number)?)?|SIN|Canadian[\\s_]?(?:social[\\s_]?(?:insurance)?|insurance[\\s_]?number)?)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b\\d{3}-\\d{3}-\\d{3}\\b", + "options": { + "case_sensitive": false, + "min_length": 11 + } + } + }, + "tags": { + "type": "canadian_sin", + "category": "pii" + } + }, + { + "id": "87a879ff33693b46c8a614d8211f5a2c289beca0", + "name": "Digest Authentication Scanner", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\bauthorization\\b", + "options": { + "case_sensitive": false, + "min_length": 13 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "^digest\\s+", + "options": { + "case_sensitive": false, + "min_length": 7 + } + } + }, + "tags": { + "type": "digest_auth", + "category": "credentials" + } + }, + { + "id": "qWumeP1GQUa_E4ffAnT-Yg", + "name": "American Express Card Scanner (1x14 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "(?:30[0-59]\\d|3[689]\\d{2})(?:\\d{10})", + "options": { + "case_sensitive": false, + "min_length": 14 + } + } + }, + "tags": { + "type": "card", + "card_type": "diners", + "category": "payment" + } + }, + { + "id": "NlTWWM5LS6W0GSqBLuvtRw", + "name": "Diners Card Scanner (4+4+4+2 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:30[0-59]\\d|3[689]\\d{2})(?:(?:\\s\\d{4}\\s\\d{4}\\s\\d{2})|(?:\\,\\d{4}\\,\\d{4}\\,\\d{2})|(?:-\\d{4}-\\d{4}-\\d{2})|(?:\\.\\d{4}\\.\\d{4}\\.\\d{2}))\\b", + "options": { + "case_sensitive": false, + "min_length": 17 + } + } + }, + "tags": { + "type": "card", + "card_type": "diners", + "category": "payment" + } + }, + { + "id": "Xr5VdbQSTXitYGGiTfxBpw", + "name": "Diners Card Scanner (4+6+4 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": 
"\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:30[0-59]\\d|3[689]\\d{2})(?:(?:\\s\\d{6}\\s\\d{4})|(?:\\.\\d{6}\\.\\d{4})|(?:-\\d{6}-\\d{4})|(?:,\\d{6},\\d{4}))\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "diners", + "category": "payment" + } + }, + { + "id": "gAbunN_WQNytxu54DjcbAA-mod", + "name": "Diners Card Scanner (8+6 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:30[0-59]\\d{5}|3[689]\\d{6})\\s?(?:(?:\\s\\d{6})|(?:\\,\\d{6})|(?:-\\d{6})|(?:\\.\\d{6}))\\b", + "options": { + "case_sensitive": false, + "min_length": 14 + } + } + }, + "tags": { + "type": "card", + "card_type": "diners", + "category": "payment" + } + }, + { + "id": "9cs4qCfEQBeX17U7AepOvQ", + "name": "MasterCard Scanner (2x8 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:6221(?:2[6-9]|[3-9][0-9])\\d{2}(?:,\\d{8}|\\s\\d{8}|-\\d{8}|\\.\\d{8})|6229(?:[01][0-9]|2[0-5])\\d{2}(?:,\\d{8}|\\s\\d{8}|-\\d{8}|\\.\\d{8})|(?:6011|65\\d{2}|64[4-9]\\d|622[2-8])\\d{4}(?:,\\d{8}|\\s\\d{8}|-\\d{8}|\\.\\d{8}))\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "discover", + "category": "payment" + } + }, + { + "id": "YBIDWJIvQWW_TFOyU0CGJg", + "name": "Discover Card Scanner (4x4 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:(?:(?:6221(?:2[6-9]|[3-9][0-9])\\d{2}(?:,\\d{4}){2})|(?:6221\\s(?:2[6-9]|[3-9][0-9])\\d{2}(?:\\s\\d{4}){2})|(?:6221\\.(?:2[6-9]|[3-9][0-9])\\d{2}(?:\\.\\d{4}){2})|(?:6221-(?:2[6-9]|[3-9][0-9])\\d{2}(?:-\\d{4}){2}))|(?:(?:6229(?:[01][0-9]|2[0-5])\\d{2}(?:,\\d{4}){2})|(?:6229\\s(?:[01][0-9]|2[0-5])\\d{2}(?:\\s\\d{4}){2})|(?:6229\\.(?:[01][0-9]|2[0-5])\\d{2}(?:\\.\\d{4}){2})|(?:6229-(?:[01][0-9]|2[0-5])\\d{2}(?:-\\d{4}){2}))|(?:(?:6011|65\\d{2}|64[4-9]\\d|622[2-8])(?:(?:\\s\\d{4}){3}|(?:\\.\\d{4}){3}|(?:-\\d{4}){3}|(?:,\\d{4}){3})))\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "discover", + "category": "payment" + } + }, + { + "id": "12cpbjtVTMaMutFhh9sojQ", + "name": "Discover Card Scanner (1x16 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": 
"\\b(?:6221(?:2[6-9]|[3-9][0-9])\\d{10}|6229(?:[01][0-9]|2[0-5])\\d{10}|(?:6011|65\\d{2}|64[4-9]\\d|622[2-8])\\d{12})\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "discover", + "category": "payment" + } + }, + { + "id": "PuXiVTCkTHOtj0Yad1ppsw", + "name": "Standard E-mail Address", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:(?:e[-\\s]?)?mail|address|sender|\\bto\\b|from|recipient)\\b", + "options": { + "case_sensitive": false, + "min_length": 2 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b[\\w!#$%&'*+/=?`{|}~^-]+(?:\\.[\\w!#$%&'*+/=?`{|}~^-]+)*(%40|@)(?:[a-zA-Z0-9-]+\\.)+[a-zA-Z]{2,6}\\b", + "options": { + "case_sensitive": false, + "min_length": 5 + } + } + }, + "tags": { + "type": "email", + "category": "pii" + } + }, + { + "id": "8VS2RKxzR8a_95L5fuwaXQ", + "name": "IBAN", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:iban|account|sender|receiver)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:NO\\d{2}(?:[ \\-]?\\d{4}){2}[ \\-]?\\d{3}|BE\\d{2}(?:[ \\-]?\\d{4}){3}|(?:DK|FO|FI|GL|SD)\\d{2}(?:[ \\-]?\\d{4}){3}[ \\-]?\\d{2}|NL\\d{2}[ \\-]?[A-Z]{4}(?:[ \\-]?\\d{4}){2}[ \\-]?\\d{2}|MK\\d{2}[ \\-]?\\d{3}[A-Z0-9](?:[ \\-]?[A-Z0-9]{4}){2}[ \\-]?[A-Z0-9]\\d{2}|SI\\d{17}|(?:AT|BA|EE|LT|XK)\\d{18}|(?:LU|KZ|EE|LT)\\d{5}[A-Z0-9]{13}|LV\\d{2}[A-Z]{4}[A-Z0-9]{13}|(?:LI|CH)\\d{2}[ \\-]?\\d{4}[ \\-]?\\d[A-Z0-9]{3}(?:[ \\-]?[A-Z0-9]{4}){2}[ \\-]?[A-Z0-9]|HR\\d{2}(?:[ \\-]?\\d{4}){4}[ \\-]?\\d|GE\\d{2}[ \\-]?[A-Z0-9]{2}\\d{2}\\d{14}|VA\\d{20}|BG\\d{2}[A-Z]{4}\\d{6}[A-Z0-9]{8}|BH\\d{2}[A-Z]{4}[A-Z0-9]{14}|GB\\d{2}[A-Z]{4}(?:[ \\-]?\\d{4}){3}[ \\-]?\\d{2}|IE\\d{2}[ \\-]?[A-Z0-9]{4}(?:[ \\-]?\\d{4}){3}[ \\-]?\\d{2}|(?:CR|DE|ME|RS)\\d{2}(?:[ \\-]?\\d{4}){4}[ \\-]?\\d{2}|(?:AE|TL|IL)\\d{2}(?:[ \\-]?\\d{4}){4}[ \\-]?\\d{3}|GI\\d{2}[ \\-]?[A-Z]{4}(?:[ \\-]?[A-Z0-9]{4}){3}[ \\-]?[A-Z0-9]{3}|IQ\\d{2}[ \\-]?[A-Z]{4}(?:[ \\-]?\\d{4}){3}[ \\-]?\\d{3}|MD\\d{2}(?:[ \\-]?[A-Z0-9]{4}){5}|SA\\d{2}[ \\-]?\\d{2}[A-Z0-9]{2}(?:[ \\-]?[A-Z0-9]{4}){4}|RO\\d{2}[ \\-]?[A-Z]{4}(?:[ \\-]?[A-Z0-9]{4}){4}|(?:PK|VG)\\d{2}[ \\-]?[A-Z0-9]{4}(?:[ \\-]?\\d{4}){4}|AD\\d{2}(?:[ \\-]?\\d{4}){2}(?:[ \\-]?[A-Z0-9]{4}){3}|(?:CZ|SK|ES|SE|TN)\\d{2}(?:[ \\-]?\\d{4}){5}|(?:LY|PT|ST)\\d{2}(?:[ \\-]?\\d{4}){5}[ \\-]?\\d|TR\\d{2}[ \\-]?\\d{4}[ \\-]?\\d[A-Z0-9]{3}(?:[ \\-]?[A-Z0-9]{4}){3}[ \\-]?[A-Z0-9]{2}|IS\\d{2}(?:[ \\-]?\\d{4}){5}[ \\-]?\\d{2}|(?:IT|SM)\\d{2}[ \\-]?[A-Z]\\d{3}[ \\-]?\\d{4}[ \\-]?\\d{3}[A-Z0-9](?:[ \\-]?[A-Z0-9]{4}){2}[ \\-]?[A-Z0-9]{3}|GR\\d{2}[ \\-]?\\d{4}[ \\-]?\\d{3}[A-Z0-9](?:[ \\-]?[A-Z0-9]{4}){3}[A-Z0-9]{3}|(?:FR|MC)\\d{2}(?:[ \\-]?\\d{4}){2}[ \\-]?\\d{2}[A-Z0-9]{2}(?:[ \\-]?[A-Z0-9]{4}){2}[ \\-]?[A-Z0-9]\\d{2}|MR\\d{2}(?:[ \\-]?\\d{4}){5}[ \\-]?\\d{3}|(?:SV|DO)\\d{2}[ \\-]?[A-Z]{4}(?:[ \\-]?\\d{4}){5}|BY\\d{2}[ \\-]?[A-Z]{4}[ \\-]?\\d{4}(?:[ \\-]?[A-Z0-9]{4}){4}|GT\\d{2}(?:[ \\-]?[A-Z0-9]{4}){6}|AZ\\d{2}[ \\-]?[A-Z0-9]{4}(?:[ \\-]?\\d{5}){4}|LB\\d{2}[ \\-]?\\d{4}(?:[ \\-]?[A-Z0-9]{5}){4}|(?:AL|CY)\\d{2}(?:[ \\-]?\\d{4}){2}(?:[ \\-]?[A-Z0-9]{4}){4}|(?:HU|PL)\\d{2}(?:[ \\-]?\\d{4}){6}|QA\\d{2}[ \\-]?[A-Z]{4}(?:[ \\-]?[A-Z0-9]{4}){5}[ \\-]?[A-Z0-9]|PS\\d{2}[ \\-]?[A-Z0-9]{4}(?:[ \\-]?\\d{4}){5}[ \\-]?\\d|UA\\d{2}[ \\-]?\\d{4}[ \\-]?\\d{2}[A-Z0-9]{2}(?:[ \\-]?[A-Z0-9]{4}){4}[ \\-]?[A-Z0-9]|BR\\d{2}(?:[ \\-]?\\d{4}){5}[ \\-]?\\d{3}[A-Z0-9][ 
\\-]?[A-Z0-9]|EG\\d{2}(?:[ \\-]?\\d{4}){6}\\d|MU\\d{2}[ \\-]?[A-Z]{4}(?:[ \\-]?\\d{4}){4}\\d{3}[A-Z][ \\-]?[A-Z]{2}|(?:KW|JO)\\d{2}[ \\-]?[A-Z]{4}(?:[ \\-]?[A-Z0-9]{4}){5}[ \\-]?[A-Z0-9]{2}|MT\\d{2}[ \\-]?[A-Z]{4}[ \\-]?\\d{4}[ \\-]?\\d[A-Z0-9]{3}(?:[ \\-]?[A-Z0-9]{3}){4}[ \\-]?[A-Z0-9]{3}|SC\\d{2}[ \\-]?[A-Z]{4}(?:[ \\-]?\\d{4}){5}[ \\-]?[A-Z]{3}|LC\\d{2}[ \\-]?[A-Z]{4}(?:[ \\-]?[A-Z0-9]{4}){6})\\b", + "options": { + "case_sensitive": false, + "min_length": 15 + } + } + }, + "tags": { + "type": "iban", + "category": "payment" + } + }, + { + "id": "h6WJcecQTwqvN9KeEtwDvg", + "name": "JCB Card Scanner (1x16 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b35(?:2[89]|[3-9][0-9])(?:\\d{12})\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "jcb", + "category": "payment" + } + }, + { + "id": "gcEaMu_VSJ2-bGCEkgyC0w", + "name": "JCB Card Scanner (2x8 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b35(?:2[89]|[3-9][0-9])\\d{4}(?:(?:,\\d{8})|(?:-\\d{8})|(?:\\s\\d{8})|(?:\\.\\d{8}))\\b", + "options": { + "case_sensitive": false, + "min_length": 17 + } + } + }, + "tags": { + "type": "card", + "card_type": "jcb", + "category": "payment" + } + }, + { + "id": "imTliuhXT5GAeRNhqChXQQ", + "name": "JCB Card Scanner (4x4 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b35(?:2[89]|[3-9][0-9])(?:(?:\\s\\d{4}){3}|(?:\\.\\d{4}){3}|(?:-\\d{4}){3}|(?:,\\d{4}){3})\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "jcb", + "category": "payment" + } + }, + { + "id": "9osY3xc9Q7ONAV0zw9Uz4A", + "name": "JSON Web Token", + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\bey[I-L][\\w=-]+\\.ey[I-L][\\w=-]+(\\.[\\w.+\\/=-]+)?\\b", + "options": { + "case_sensitive": false, + "min_length": 20 + } + } + }, + "tags": { + "type": "json_web_token", + "category": "credentials" + } + }, + { + "id": "d1Q9D3YMRxuVKf6CZInJPw", + "name": "Maestro Card Scanner (1x16 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:5[06-9]\\d{2}|6\\d{3})(?:\\d{12})\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "maestro", + "category": "payment" + } + }, + { + "id": "M3YIQKKjRVmoeQuM3pjzrw", + "name": "Maestro Card Scanner (2x8 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": 
"\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:5[06-9]\\d{6}|6\\d{7})(?:\\s\\d{8}|\\.\\d{8}|-\\d{8}|,\\d{8})\\b", + "options": { + "case_sensitive": false, + "min_length": 17 + } + } + }, + "tags": { + "type": "card", + "card_type": "maestro", + "category": "payment" + } + }, + { + "id": "hRxiQBlSSVKcjh5U7LZYLA", + "name": "Maestro Card Scanner (4x4 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:5[06-9]\\d{2}|6\\d{3})(?:(?:\\s\\d{4}){3}|(?:\\.\\d{4}){3}|(?:-\\d{4}){3}|(?:,\\d{4}){3})\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "maestro", + "category": "payment" + } + }, + { + "id": "NwhIYNS4STqZys37WlaIKA", + "name": "MasterCard Scanner (2x8 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:(?:5[1-5]\\d{2})|(?:222[1-9])|(?:22[3-9]\\d)|(?:2[3-6]\\d{2})|(?:27[0-1]\\d)|(?:2720))(?:(?:\\d{4}(?:(?:,\\d{8})|(?:-\\d{8})|(?:\\s\\d{8})|(?:\\.\\d{8}))))\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "mastercard", + "category": "payment" + } + }, + { + "id": "axxJkyjhRTOuhjwlsA35Vw", + "name": "MasterCard Scanner (4x4 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:(?:5[1-5]\\d{2})|(?:222[1-9])|(?:22[3-9]\\d)|(?:2[3-6]\\d{2})|(?:27[0-1]\\d)|(?:2720))(?:(?:\\s\\d{4}){3}|(?:\\.\\d{4}){3}|(?:-\\d{4}){3}|(?:,\\d{4}){3})\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "mastercard", + "category": "payment" + } + }, + { + "id": "76EhmoK3TPqJcpM-fK0pLw", + "name": "MasterCard Scanner (1x16 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:(?:5[1-5]\\d{2})|(?:222[1-9])|(?:22[3-9]\\d)|(?:2[3-6]\\d{2})|(?:27[0-1]\\d)|(?:2720))(?:\\d{12})\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "mastercard", + "category": "payment" + } + }, + { + "id": "de0899e0cbaaa812bb624cf04c912071012f616d-mod", + "name": "UK National Insurance Number Scanner", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "^nin$|\\binsurance\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + 
"parameters": { + "regex": "\\b[A-Z]{2}[\\s-]?\\d{6}[\\s-]?[A-Z]?\\b", + "options": { + "case_sensitive": false, + "min_length": 8 + } + } + }, + "tags": { + "type": "uk_nin", + "category": "pii" + } + }, + { + "id": "d962f7ddb3f55041e39195a60ff79d4814a7c331", + "name": "US Passport Scanner", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\bpassport\\b", + "options": { + "case_sensitive": false, + "min_length": 8 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b[0-9A-Z]{9}\\b|\\b[0-9]{6}[A-Z][0-9]{2}\\b", + "options": { + "case_sensitive": false, + "min_length": 8 + } + } + }, + "tags": { + "type": "passport_number", + "category": "pii" + } + }, + { + "id": "7771fc3b-b205-4b93-bcef-28608c5c1b54", + "name": "United States Social Security Number Scanner", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:SSN|(?:(?:social)?[\\s_]?(?:security)?[\\s_]?(?:number)?)?)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b\\d{3}[-\\s\\.]{1}\\d{2}[-\\s\\.]{1}\\d{4}\\b", + "options": { + "case_sensitive": false, + "min_length": 11 + } + } + }, + "tags": { + "type": "us_ssn", + "category": "pii" + } + }, + { + "id": "ac6d683cbac77f6e399a14990793dd8fd0fca333", + "name": "US Vehicle Identification Number Scanner", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:vehicle[_\\s-]*identification[_\\s-]*number|vin)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b[A-HJ-NPR-Z0-9]{17}\\b", + "options": { + "case_sensitive": false, + "min_length": 17 + } + } + }, + "tags": { + "type": "vin", + "category": "pii" + } + }, + { + "id": "wJIgOygRQhKkR69b_9XbRQ", + "name": "Visa Card Scanner (2x8 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b4\\d{3}(?:(?:\\d{4}(?:(?:,\\d{8})|(?:-\\d{8})|(?:\\s\\d{8})|(?:\\.\\d{8}))))\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "visa", + "category": "payment" + } + }, + { + "id": "0o71SJxXQNK7Q6gMbBesFQ", + "name": "Visa Card Scanner (4x4 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "\\b4\\d{3}(?:(?:,\\d{4}){3}|(?:\\s\\d{4}){3}|(?:\\.\\d{4}){3}|(?:-\\d{4}){3})\\b", + "options": { + "case_sensitive": false, + "min_length": 16 + } + } + }, + "tags": { + "type": "card", + "card_type": "visa", + "category": "payment" + } + }, + { + "id": "QrHD6AfgQm6z-j0wStxTvA", + "name": "Visa Card Scanner (1x15 & 1x16 & 1x19 digits)", + "key": { + "operator": "match_regex", + "parameters": { + "regex": "\\b(?:card|cc|credit|debit|payment|amex|visa|mastercard|maestro|discover|jcb|diner)\\b", + "options": { + "case_sensitive": false, + "min_length": 3 + } + } + }, + "value": { + "operator": "match_regex", + "parameters": { + "regex": "4[0-9]{12}(?:[0-9]{3})?", + "options": { + 
"case_sensitive": false, + "min_length": 13 + } + } + }, + "tags": { + "type": "card", + "card_type": "visa", + "category": "payment" + } + } ] -} +} \ No newline at end of file diff --git a/packages/dd-trace/src/appsec/reporter.js b/packages/dd-trace/src/appsec/reporter.js index d22613c749a..409b96fa85b 100644 --- a/packages/dd-trace/src/appsec/reporter.js +++ b/packages/dd-trace/src/appsec/reporter.js @@ -151,10 +151,8 @@ function reportSchemas (derivatives) { const tags = {} for (const [address, value] of Object.entries(derivatives)) { - if (address.startsWith('_dd.appsec.s.req')) { - const gzippedValue = zlib.gzipSync(JSON.stringify(value)) - tags[address] = gzippedValue.toString('base64') - } + const gzippedValue = zlib.gzipSync(JSON.stringify(value)) + tags[address] = gzippedValue.toString('base64') } rootSpan.addTags(tags) diff --git a/packages/dd-trace/test/appsec/reporter.spec.js b/packages/dd-trace/test/appsec/reporter.spec.js index 36f1e1b5276..6cb435651c6 100644 --- a/packages/dd-trace/test/appsec/reporter.spec.js +++ b/packages/dd-trace/test/appsec/reporter.spec.js @@ -292,7 +292,7 @@ describe('reporter', () => { expect(span.addTags).to.be.calledOnceWithExactly({}) }) - it('should call addTags with matched tags', () => { + it('should call addTags', () => { const schemaValue = [{ 'key': [8] }] const derivatives = { '_dd.appsec.s.req.headers': schemaValue, @@ -311,7 +311,8 @@ describe('reporter', () => { '_dd.appsec.s.req.query': schemaEncoded, '_dd.appsec.s.req.params': schemaEncoded, '_dd.appsec.s.req.cookies': schemaEncoded, - '_dd.appsec.s.req.body': schemaEncoded + '_dd.appsec.s.req.body': schemaEncoded, + 'custom.processor.output': schemaEncoded }) }) }) From 1ebb4742de53c55e71c285f043fb9132a630224d Mon Sep 17 00:00:00 2001 From: Thomas Hunter II Date: Tue, 12 Dec 2023 09:24:42 -0800 Subject: [PATCH 104/147] manual logging and garbage collection of old spans (#3849) --- LICENSE-3rdparty.csv | 1 + integration-tests/memory-leak/index.js | 15 ++++ package.json | 3 +- packages/dd-trace/src/config.js | 8 ++ packages/dd-trace/src/opentracing/span.js | 2 + packages/dd-trace/src/proxy.js | 10 +++ packages/dd-trace/src/spanleak.js | 98 +++++++++++++++++++++++ yarn.lock | 5 ++ 8 files changed, 141 insertions(+), 1 deletion(-) create mode 100644 integration-tests/memory-leak/index.js create mode 100644 packages/dd-trace/src/spanleak.js diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv index 15404050720..887ad2a6294 100644 --- a/LICENSE-3rdparty.csv +++ b/LICENSE-3rdparty.csv @@ -30,6 +30,7 @@ require,opentracing,MIT,Copyright 2016 Resonance Labs Inc require,path-to-regexp,MIT,Copyright 2014 Blake Embrey require,pprof-format,MIT,Copyright 2022 Stephen Belanger require,protobufjs,BSD-3-Clause,Copyright 2016 Daniel Wirtz +require,tlhunter-sorted-set,MIT,Copyright (c) 2023 Datadog Inc. require,retry,MIT,Copyright 2011 Tim Koschützki Felix Geisendörfer require,semver,ISC,Copyright Isaac Z. Schlueter and Contributors dev,@types/node,MIT,Copyright Authors diff --git a/integration-tests/memory-leak/index.js b/integration-tests/memory-leak/index.js new file mode 100644 index 00000000000..01d4c2c439e --- /dev/null +++ b/integration-tests/memory-leak/index.js @@ -0,0 +1,15 @@ +const tracer = require('../../') +tracer.init() + +const http = require('http') + +http.createServer((req, res) => { + const delay = Math.random() < 0.01 // 1% + ? 
61 * 1000 // over 1 minute + : Math.random() * 1000 // random 0 - 1s + + setTimeout(() => { + res.write('Hello World!') + res.end() + }, delay) +}).listen(8080) diff --git a/package.json b/package.json index bcb6d20ba98..3e59b71a937 100644 --- a/package.json +++ b/package.json @@ -97,8 +97,9 @@ "node-abort-controller": "^3.1.1", "opentracing": ">=0.12.1", "path-to-regexp": "^0.1.2", - "pprof-format": "^2.0.7", + "pprof-format": "^2.0.7", "protobufjs": "^7.2.5", + "tlhunter-sorted-set": "^0.1.0", "retry": "^0.13.1", "semver": "^7.5.4" }, diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index 5729e4761de..bc03636f6ef 100644 --- a/packages/dd-trace/src/config.js +++ b/packages/dd-trace/src/config.js @@ -540,6 +540,12 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) true ) + // 0: disabled, 1: logging, 2: garbage collection + logging + const DD_TRACE_SPAN_LEAK_DEBUG = coalesce( + process.env.DD_TRACE_SPAN_LEAK_DEBUG, + 0 + ) + const ingestion = options.ingestion || {} const dogstatsd = coalesce(options.dogstatsd, {}) const sampler = { @@ -722,6 +728,8 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) this.isGCPFunction = isGCPFunction this.isAzureFunctionConsumptionPlan = isAzureFunctionConsumptionPlan + this.spanLeakDebug = Number(DD_TRACE_SPAN_LEAK_DEBUG) + tagger.add(this.tags, { service: this.service, env: this.env, diff --git a/packages/dd-trace/src/opentracing/span.js b/packages/dd-trace/src/opentracing/span.js index 86e0c5d12ed..5ba2149503e 100644 --- a/packages/dd-trace/src/opentracing/span.js +++ b/packages/dd-trace/src/opentracing/span.js @@ -13,6 +13,7 @@ const log = require('../log') const { storage } = require('../../../datadog-core') const telemetryMetrics = require('../telemetry/metrics') const { channel } = require('dc-polyfill') +const spanleak = require('../spanleak') const tracerMetrics = telemetryMetrics.manager.namespace('tracers') @@ -90,6 +91,7 @@ class DatadogSpan { unfinishedRegistry.register(this, operationName, this) } + spanleak.addSpan(this, operationName) } toString () { diff --git a/packages/dd-trace/src/proxy.js b/packages/dd-trace/src/proxy.js index 2919ad9483b..91be1fe9ad7 100644 --- a/packages/dd-trace/src/proxy.js +++ b/packages/dd-trace/src/proxy.js @@ -10,6 +10,7 @@ const PluginManager = require('./plugin_manager') const remoteConfig = require('./appsec/remote_config') const AppsecSdk = require('./appsec/sdk') const dogstatsd = require('./dogstatsd') +const spanleak = require('./spanleak') class Tracer extends NoopProxy { constructor () { @@ -37,6 +38,15 @@ class Tracer extends NoopProxy { }, 10 * 1000).unref() } + if (config.spanLeakDebug > 0) { + if (config.spanLeakDebug === spanleak.MODES.LOG) { + spanleak.enableLogging() + } else if (config.spanLeakDebug === spanleak.MODES.GC_AND_LOG) { + spanleak.enableGarbageCollection() + } + spanleak.startScrubber() + } + if (config.remoteConfig.enabled && !config.isCiVisibility) { const rc = remoteConfig.enable(config) diff --git a/packages/dd-trace/src/spanleak.js b/packages/dd-trace/src/spanleak.js new file mode 100644 index 00000000000..bfded4d8d3e --- /dev/null +++ b/packages/dd-trace/src/spanleak.js @@ -0,0 +1,98 @@ +'use strict' + +/* eslint-disable no-console */ + +const SortedSet = require('tlhunter-sorted-set') + +const INTERVAL = 1000 // look for expired spans every 1s +const LIFETIME = 60 * 1000 // all spans have a max lifetime of 1m + +const MODES = { + DISABLED: 0, + // METRICS_ONLY + LOG: 1, + 
GC_AND_LOG: 2 + // GC +} + +module.exports.MODES = MODES + +const spans = new SortedSet() + +// TODO: should these also be delivered as runtime metrics? + +// const registry = new FinalizationRegistry(name => { +// spans.del(span) // there is no span +// }) + +let interval +let mode = MODES.DISABLED + +module.exports.disable = function () { + mode = MODES.DISABLED +} + +module.exports.enableLogging = function () { + mode = MODES.LOG +} + +module.exports.enableGarbageCollection = function () { + mode = MODES.GC_AND_LOG +} + +module.exports.startScrubber = function () { + if (!isEnabled()) return + + interval = setInterval(() => { + const now = Date.now() + const expired = spans.rangeByScore(0, now) + + if (!expired.length) return + + const gc = isGarbageCollecting() + + const expirationsByType = Object.create(null) // { [spanType]: count } + + for (const wrapped of expired) { + spans.del(wrapped) + const span = wrapped.deref() + + if (!span) continue // span has already been garbage collected + + // TODO: Should we also do things like record the route to help users debug leaks? + if (!expirationsByType[span._name]) expirationsByType[span._name] = 0 + expirationsByType[span._name]++ + + if (!gc) continue // everything after this point is related to manual GC + + // TODO: what else can we do to alleviate memory usage + span.context()._tags = Object.create(null) + } + + console.log('expired spans:' + + Object.keys(expirationsByType).reduce((a, c) => `${a} ${c}: ${expirationsByType[c]}`, '')) + }, INTERVAL) +} + +module.exports.stopScrubber = function () { + clearInterval(interval) +} + +module.exports.addSpan = function (span) { + if (!isEnabled()) return + + const now = Date.now() + const expiration = now + LIFETIME + // eslint-disable-next-line no-undef + const wrapped = new WeakRef(span) + spans.add(wrapped, expiration) + // registry.register(span, span._name) +} + +function isEnabled () { + return mode > MODES.DISABLED +} + +function isGarbageCollecting () { + return mode >= MODES.GC_AND_LOG +} diff --git a/yarn.lock b/yarn.lock index fa4a7e11d31..895312d70b4 100644 --- a/yarn.lock +++ b/yarn.lock @@ -4943,6 +4943,11 @@ timestring@^6.0.0: resolved "https://registry.npmjs.org/timestring/-/timestring-6.0.0.tgz" integrity sha512-wMctrWD2HZZLuIlchlkE2dfXJh7J2KDI9Dwl+2abPYg0mswQHfOAyQW3jJg1pY5VfttSINZuKcXoB3FGypVklA== +tlhunter-sorted-set@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/tlhunter-sorted-set/-/tlhunter-sorted-set-0.1.0.tgz#1c3eae28c0fa4dff97e9501d2e3c204b86406f4b" + integrity sha512-eGYW4bjf1DtrHzUYxYfAcSytpOkA44zsr7G2n3PV7yOUR23vmkGe3LL4R+1jL9OsXtbsFOwe8XtbCrabeaEFnw== + to-fast-properties@^2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz" From 04d5d761847e01a371a161b2ca3e999a46a670f0 Mon Sep 17 00:00:00 2001 From: William Conti <58711692+wconti27@users.noreply.github.com> Date: Tue, 12 Dec 2023 14:00:23 -0500 Subject: [PATCH 105/147] chore: fix aws-sdk failing test suite (#3860) * fix aws-sdk plugin tests --- .github/workflows/plugins.yml | 23 +++++++++++++++--- docker-compose.yml | 24 ++++++++++++++++--- .../test/kinesis.spec.js | 5 ++-- .../test/kinesis_helpers.js | 10 +++++--- .../test/lambda.spec.js | 2 +- .../datadog-plugin-aws-sdk/test/s3.spec.js | 2 +- 6 files changed, 53 insertions(+), 13 deletions(-) diff --git a/.github/workflows/plugins.yml b/.github/workflows/plugins.yml index 2e2aa3b5764..7fdf623df79 100644 --- a/.github/workflows/plugins.yml +++ b/.github/workflows/plugins.yml @@ -161,9 
+161,9 @@ jobs: runs-on: ubuntu-latest services: localstack: - image: localstack/localstack:1.1.0 + image: localstack/localstack:3.0.2 env: - LOCALSTACK_SERVICES: dynamodb,kinesis,s3,sqs,sns,redshift,route53,logs,serverless + LOCALSTACK_SERVICES: dynamodb,kinesis,s3,sqs,sns,redshift,route53,logs,serverless,lambda EXTRA_CORS_ALLOWED_HEADERS: x-amz-request-id,x-amzn-requestid,x-amz-id-2 EXTRA_CORS_EXPOSE_HEADERS: x-amz-request-id,x-amzn-requestid,x-amz-id-2 AWS_DEFAULT_REGION: us-east-1 @@ -172,9 +172,26 @@ jobs: START_WEB: '0' ports: - 4566:4566 + # we have two localstacks since upgrading localstack was causing lambda & S3 tests to fail + # To-Do: Debug localstack / lambda and localstack / S3 + localstack-legacy: + image: localstack/localstack:1.1.0 + ports: + - "127.0.0.1:4567:4567" # Edge + env: + LOCALSTACK_SERVICES: dynamodb,kinesis,s3,sqs,sns,redshift,route53,logs,serverless + EXTRA_CORS_ALLOWED_HEADERS: x-amz-request-id,x-amzn-requestid,x-amz-id-2 + EXTRA_CORS_EXPOSE_HEADERS: x-amz-request-id,x-amzn-requestid,x-amz-id-2 + AWS_DEFAULT_REGION: us-east-1 + FORCE_NONINTERACTIVE: 'true' + LAMBDA_EXECUTOR: local + START_WEB: '0' + GATEWAY_LISTEN: 127.0.0.1:4567 + EDGE_PORT: 4567 + EDGE_PORT_HTTP: 4567 env: PLUGINS: aws-sdk - SERVICES: localstack + SERVICES: localstack localstack-legacy steps: - uses: actions/checkout@v2 - uses: ./.github/actions/testagent/start diff --git a/docker-compose.yml b/docker-compose.yml index 2ff0e15120a..ed2cb8dfda9 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -87,11 +87,26 @@ services: ports: - "127.0.0.1:8081:8081" localstack: - # TODO: Figure out why SNS doesn't work in >=1.2 - # https://github.com/localstack/localstack/issues/7479 - image: localstack/localstack:1.1.0 + image: localstack/localstack:3.0.2 ports: - "127.0.0.1:4566:4566" # Edge + environment: + - LOCALSTACK_SERVICES=dynamodb,kinesis,s3,sqs,sns,redshift,route53,logs,serverless,lambda + - EXTRA_CORS_ALLOWED_HEADERS=x-amz-request-id,x-amzn-requestid,x-amz-id-2 + - EXTRA_CORS_EXPOSE_HEADERS=x-amz-request-id,x-amzn-requestid,x-amz-id-2 + - AWS_DEFAULT_REGION=us-east-1 + - FORCE_NONINTERACTIVE=true + - START_WEB=0 + - DEBUG=${DEBUG-} + - DOCKER_HOST=unix:///var/run/docker.sock + volumes: + - "/var/run/docker.sock:/var/run/docker.sock" + localstack-legacy: + # we have two localstacks since upgrading localstack was causing lambda & S3 tests to fail + # To-Do: Debug localstack / lambda and localstack / S3 + image: localstack/localstack:1.1.0 + ports: + - "127.0.0.1:4567:4567" # Edge environment: - LOCALSTACK_SERVICES=dynamodb,kinesis,s3,sqs,sns,redshift,route53,logs,serverless - EXTRA_CORS_ALLOWED_HEADERS=x-amz-request-id,x-amzn-requestid,x-amz-id-2 @@ -99,6 +114,9 @@ services: - AWS_DEFAULT_REGION=us-east-1 - FORCE_NONINTERACTIVE=true - START_WEB=0 + - GATEWAY_LISTEN=127.0.0.1:4567 + - EDGE_PORT=4567 + - EDGE_PORT_HTTP=4567 - LAMBDA_EXECUTOR=local kafka: image: debezium/kafka:1.7 diff --git a/packages/datadog-plugin-aws-sdk/test/kinesis.spec.js b/packages/datadog-plugin-aws-sdk/test/kinesis.spec.js index 41d76d61236..db8177370c0 100644 --- a/packages/datadog-plugin-aws-sdk/test/kinesis.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/kinesis.spec.js @@ -19,7 +19,8 @@ describe('Kinesis', () => { return agent.load('aws-sdk') }) - before(done => { + before(function (done) { + this.timeout(0) AWS = require(`../../../versions/${kinesisClientName}@${version}`).get() const params = { @@ -40,7 +41,7 @@ describe('Kinesis', () => { }, (err, res) => { if (err) return done(err) - 
helpers.waitForActiveStream(kinesis, done) + helpers.waitForActiveStream(this, kinesis, done) }) }) diff --git a/packages/datadog-plugin-aws-sdk/test/kinesis_helpers.js b/packages/datadog-plugin-aws-sdk/test/kinesis_helpers.js index f9f61ada0bf..f76e6119251 100644 --- a/packages/datadog-plugin-aws-sdk/test/kinesis_helpers.js +++ b/packages/datadog-plugin-aws-sdk/test/kinesis_helpers.js @@ -45,13 +45,17 @@ function putTestRecord (kinesis, data, cb) { }, cb) } -function waitForActiveStream (kinesis, cb) { +function waitForActiveStream (mocha, kinesis, cb) { kinesis.describeStream({ StreamName: 'MyStream' }, (err, data) => { - if (err) return waitForActiveStream(kinesis, cb) + if (err) { + mocha.timeout(2000) + return waitForActiveStream(mocha, kinesis, cb) + } if (data.StreamDescription.StreamStatus !== 'ACTIVE') { - return waitForActiveStream(kinesis, cb) + mocha.timeout(2000) + return waitForActiveStream(mocha, kinesis, cb) } cb() diff --git a/packages/datadog-plugin-aws-sdk/test/lambda.spec.js b/packages/datadog-plugin-aws-sdk/test/lambda.spec.js index 2bfafed17e7..4ccf3b8b46e 100644 --- a/packages/datadog-plugin-aws-sdk/test/lambda.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/lambda.spec.js @@ -40,7 +40,7 @@ describe('Plugin', () => { before(done => { AWS = require(`../../../versions/${lambdaClientName}@${version}`).get() - lambda = new AWS.Lambda({ endpoint: 'http://127.0.0.1:4566', region: 'us-east-1' }) + lambda = new AWS.Lambda({ endpoint: 'http://127.0.0.1:4567', region: 'us-east-1' }) lambda.createFunction({ FunctionName: 'ironmaiden', Code: { ZipFile }, diff --git a/packages/datadog-plugin-aws-sdk/test/s3.spec.js b/packages/datadog-plugin-aws-sdk/test/s3.spec.js index 21165ce7b3f..ab1e3911047 100644 --- a/packages/datadog-plugin-aws-sdk/test/s3.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/s3.spec.js @@ -37,7 +37,7 @@ describe('Plugin', () => { before(done => { AWS = require(`../../../versions/${s3ClientName}@${version}`).get() - s3 = new AWS.S3({ endpoint: 'http://127.0.0.1:4566', s3ForcePathStyle: true, region: 'us-east-1' }) + s3 = new AWS.S3({ endpoint: 'http://127.0.0.1:4567', s3ForcePathStyle: true, region: 'us-east-1' }) s3.createBucket({ Bucket: bucketName }, (err) => { if (err) return done(err) done() From 65631c800e3442dfe57e76dae36b7244d91a022e Mon Sep 17 00:00:00 2001 From: Thomas Hunter II Date: Tue, 12 Dec 2023 11:11:19 -0800 Subject: [PATCH 106/147] create a security policy via SECURITY.md (#3863) - this is a convention used by GitHub --- SECURITY.md | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 SECURITY.md diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 00000000000..2061bbe0d09 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,17 @@ +# Security Policy + +This document outlines the security policy for the Datadog Node.js Tracer (aka `dd-trace-js`) and what to do if you discover a security vulnerability in the project. +Most notably, please do not share the details in a public forum (such as in a discussion, issue, or pull request) but instead reach out to us with the details. +This gives us an opportunity to release a fix for others to benefit from by the time details are made public. + + +## Supported Versions + +We accept vulnerability submissions for any [currently maintained release lines](https://github.com/DataDog/dd-trace-js#version-release-lines-and-maintenance). 
+ + +## Reporting a Vulnerability + +If you discover a vulnerability in the Datadog Node.js Tracer (or any Datadog product for that matter) please submit details to the following email address: + +* [security@datadoghq.com](mailto:security@datadoghq.com) From 54f0e5a0def215b173ed8c3cbf94e41fabd3859d Mon Sep 17 00:00:00 2001 From: Ugaitz Urien Date: Wed, 13 Dec 2023 11:43:17 +0100 Subject: [PATCH 107/147] Apply new rules for header injection detection to prevent false positives (#3867) --- .../analyzers/header-injection-analyzer.js | 23 +++++++++++++------ .../header-injection.express.plugin.spec.js | 16 +++++++++++++ 2 files changed, 32 insertions(+), 7 deletions(-) diff --git a/packages/dd-trace/src/appsec/iast/analyzers/header-injection-analyzer.js b/packages/dd-trace/src/appsec/iast/analyzers/header-injection-analyzer.js index 73ac404f5a5..3e622016858 100644 --- a/packages/dd-trace/src/appsec/iast/analyzers/header-injection-analyzer.js +++ b/packages/dd-trace/src/appsec/iast/analyzers/header-injection-analyzer.js @@ -44,9 +44,14 @@ class HeaderInjectionAnalyzer extends InjectionAnalyzer { if (this.isExcludedHeaderName(lowerCasedHeaderName) || typeof value !== 'string') return - return super._isVulnerable(value, iastContext) && - !(this.isCookieExclusion(lowerCasedHeaderName, value, iastContext) || - this.isAccessControlAllowOriginExclusion(lowerCasedHeaderName, value, iastContext)) + const ranges = getRanges(iastContext, value) + if (ranges?.length > 0) { + return !(this.isCookieExclusion(lowerCasedHeaderName, ranges) || + this.isSameHeaderExclusion(lowerCasedHeaderName, ranges) || + this.isAccessControlAllowOriginExclusion(lowerCasedHeaderName, ranges)) + } + + return false } _getEvidence (headerInfo, iastContext) { @@ -70,24 +75,28 @@ class HeaderInjectionAnalyzer extends InjectionAnalyzer { return EXCLUDED_HEADER_NAMES.includes(name) } - isCookieExclusion (name, value, iastContext) { + isCookieExclusion (name, ranges) { if (name === 'set-cookie') { - return getRanges(iastContext, value) + return ranges .every(range => range.iinfo.type === HTTP_REQUEST_COOKIE_VALUE || range.iinfo.type === HTTP_REQUEST_COOKIE_NAME) } return false } - isAccessControlAllowOriginExclusion (name, value, iastContext) { + isAccessControlAllowOriginExclusion (name, ranges) { if (name === 'access-control-allow-origin') { - return getRanges(iastContext, value) + return ranges .every(range => range.iinfo.type === HTTP_REQUEST_HEADER_VALUE) } return false } + isSameHeaderExclusion (name, ranges) { + return ranges.length === 1 && name === ranges[0].iinfo.parameterName?.toLowerCase() + } + _getExcludedPaths () { return EXCLUDED_PATHS } diff --git a/packages/dd-trace/test/appsec/iast/analyzers/header-injection.express.plugin.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/header-injection.express.plugin.spec.js index 3d825997654..dcddd88f869 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/header-injection.express.plugin.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/header-injection.express.plugin.spec.js @@ -83,6 +83,22 @@ describe('Header injection vulnerability', () => { vulnerability: 'HEADER_INJECTION' }) + testThatRequestHasNoVulnerability({ + testDescription: 'should not have HEADER_INJECTION vulnerability ' + + 'when is the header same header', + fn: (req, res) => { + setHeaderFunction('testheader', req.get('testheader'), res) + }, + vulnerability: 'HEADER_INJECTION', + makeRequest: (done, config) => { + return axios.get(`http://localhost:${config.port}/`, { + headers: { + 
testheader: 'headerValue' + } + }).catch(done) + } + }) + testThatRequestHasNoVulnerability({ testDescription: 'should not have HEADER_INJECTION vulnerability when the header value is not tainted', fn: (req, res) => { From a5d0c47430f4e0e2e4edf94ecc65a2294f972438 Mon Sep 17 00:00:00 2001 From: Thomas Hunter II Date: Wed, 13 Dec 2023 07:55:44 -0800 Subject: [PATCH 108/147] restify: resolve 0th promise arg, not arguments (#3818) --- .../datadog-instrumentations/src/restify.js | 2 +- .../datadog-plugin-restify/test/index.spec.js | 37 +++++++++++++++++++ 2 files changed, 38 insertions(+), 1 deletion(-) diff --git a/packages/datadog-instrumentations/src/restify.js b/packages/datadog-instrumentations/src/restify.js index 2644f916b3f..8d0cc70fa0f 100644 --- a/packages/datadog-instrumentations/src/restify.js +++ b/packages/datadog-instrumentations/src/restify.js @@ -55,7 +55,7 @@ function wrapFn (fn) { return result.then(function () { nextChannel.publish({ req }) finishChannel.publish({ req }) - return arguments + return arguments[0] }).catch(function (error) { errorChannel.publish({ req, error }) nextChannel.publish({ req }) diff --git a/packages/datadog-plugin-restify/test/index.spec.js b/packages/datadog-plugin-restify/test/index.spec.js index ea7a5f17aa7..bb96b34a132 100644 --- a/packages/datadog-plugin-restify/test/index.spec.js +++ b/packages/datadog-plugin-restify/test/index.spec.js @@ -114,6 +114,43 @@ describe('Plugin', () => { }) }) + it('should route without producing any warnings', done => { + const warningSpy = sinon.spy((_, msg) => { + // eslint-disable-next-line no-console + console.error(`route called with warning: ${msg}`) + }) + + const server = restify.createServer({ + log: { + trace: () => {}, + warn: warningSpy + } + }) + + server.get( + '/user/:id', + async function middleware () {}, + async function handler (req, res) { + res.send('hello, ' + req.params.id) + } + ) + + getPort().then(port => { + agent + .use(traces => { + expect(warningSpy).to.not.have.been.called + }) + .then(done) + .catch(done) + + appListener = server.listen(port, 'localhost', () => { + axios + .get(`http://localhost:${port}/user/123`) + .catch(done) + }) + }) + }) + it('should run handlers in the request scope', done => { const server = restify.createServer() const interval = setInterval(() => { From 3d2ef8897fe28a8f933c66a2b7a22c0d5c40ff15 Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Wed, 13 Dec 2023 19:20:43 +0100 Subject: [PATCH 109/147] PROF-8523: Bugfix and integration test for Net timeline events (#3870) --- integration-tests/profiler.spec.js | 175 +++++++++++------- integration-tests/profiler/dnstest.js | 3 - integration-tests/profiler/nettest.js | 36 ++++ .../src/profiling/profilers/events.js | 2 +- 4 files changed, 150 insertions(+), 66 deletions(-) create mode 100644 integration-tests/profiler/nettest.js diff --git a/integration-tests/profiler.spec.js b/integration-tests/profiler.spec.js index 1f83671d730..41ba92cd6a1 100644 --- a/integration-tests/profiler.spec.js +++ b/integration-tests/profiler.spec.js @@ -10,6 +10,7 @@ const path = require('path') const { assert } = require('chai') const fs = require('node:fs/promises') const fsync = require('node:fs') +const net = require('node:net') const zlib = require('node:zlib') const { Profile } = require('pprof-format') @@ -63,6 +64,74 @@ async function getLatestProfile (cwd, pattern) { const pprofUnzipped = zlib.gunzipSync(pprofGzipped) return Profile.decode(pprofUnzipped) } + +async function gatherNetworkTimelineEvents (cwd, 
scriptFilePath, eventType, threadName, args) { + const procStart = BigInt(Date.now() * 1000000) + const proc = fork(path.join(cwd, scriptFilePath), args, { + cwd, + env: { + DD_PROFILING_PROFILERS: 'wall', + DD_PROFILING_EXPORTERS: 'file', + DD_PROFILING_ENABLED: 1, + DD_PROFILING_EXPERIMENTAL_TIMELINE_ENABLED: 1 + } + }) + + await processExitPromise(proc, 5000) + const procEnd = BigInt(Date.now() * 1000000) + + const prof = await getLatestProfile(cwd, /^events_.+\.pprof$/) + + const strings = prof.stringTable + const tsKey = strings.dedup('end_timestamp_ns') + const eventKey = strings.dedup('event') + const hostKey = strings.dedup('host') + const addressKey = strings.dedup('address') + const portKey = strings.dedup('port') + const threadNameKey = strings.dedup('thread name') + const nameKey = strings.dedup('operation') + const eventValue = strings.dedup(eventType) + const events = [] + const threadNamePrefix = `Main ${threadName}-` + for (const sample of prof.sample) { + let ts, event, host, address, port, name, threadName + for (const label of sample.label) { + switch (label.key) { + case tsKey: ts = label.num; break + case nameKey: name = label.str; break + case eventKey: event = label.str; break + case hostKey: host = label.str; break + case addressKey: address = label.str; break + case portKey: port = label.num; break + case threadNameKey: threadName = label.str; break + default: assert.fail(`Unexpected label key ${label.key} ${strings.strings[label.key]}`) + } + } + // Timestamp must be defined and be between process start and end time + assert.isDefined(ts) + assert.isTrue(ts <= procEnd) + assert.isTrue(ts >= procStart) + // Gather only DNS events; ignore sporadic GC events + if (event === eventValue) { + assert.isTrue(strings.strings[threadName].startsWith(threadNamePrefix)) + assert.isDefined(name) + // Exactly one of these is defined + assert.isTrue(!!address !== !!host) + const ev = { name: strings.strings[name] } + if (address) { + ev.address = strings.strings[address] + } else { + ev.host = strings.strings[host] + } + if (port) { + ev.port = port + } + events.push(ev) + } + } + return events +} + describe('profiler', () => { let agent let proc @@ -178,68 +247,7 @@ describe('profiler', () => { }) it('dns timeline events work', async () => { - const procStart = BigInt(Date.now() * 1000000) - const proc = fork(path.join(cwd, 'profiler/dnstest.js'), { - cwd, - env: { - DD_PROFILING_PROFILERS: 'wall', - DD_PROFILING_EXPORTERS: 'file', - DD_PROFILING_ENABLED: 1, - DD_PROFILING_EXPERIMENTAL_TIMELINE_ENABLED: 1 - } - }) - - await processExitPromise(proc, 5000) - const procEnd = BigInt(Date.now() * 1000000) - - const prof = await getLatestProfile(cwd, /^events_.+\.pprof$/) - assert.isAtLeast(prof.sample.length, 5) - - const strings = prof.stringTable - const tsKey = strings.dedup('end_timestamp_ns') - const eventKey = strings.dedup('event') - const hostKey = strings.dedup('host') - const addressKey = strings.dedup('address') - const portKey = strings.dedup('port') - const threadNameKey = strings.dedup('thread name') - const nameKey = strings.dedup('operation') - const dnsEventValue = strings.dedup('dns') - const dnsEvents = [] - for (const sample of prof.sample) { - let ts, event, host, address, port, name, threadName - for (const label of sample.label) { - switch (label.key) { - case tsKey: ts = label.num; break - case nameKey: name = label.str; break - case eventKey: event = label.str; break - case hostKey: host = label.str; break - case addressKey: address = label.str; break - case 
portKey: port = label.num; break - case threadNameKey: threadName = label.str; break - default: assert.fail(`Unexpected label key ${label.key} ${strings.strings[label.key]}`) - } - } - // Timestamp must be defined and be between process start and end time - assert.isDefined(ts) - assert.isTrue(ts <= procEnd) - assert.isTrue(ts >= procStart) - // Gather only DNS events; ignore sporadic GC events - if (event === dnsEventValue) { - // Thread name must be defined and exactly equal "Main DNS" - assert.isTrue(strings.strings[threadName].startsWith('Main DNS-')) - assert.isDefined(name) - // Exactly one of these is defined - assert.isTrue(!!address !== !!host) - const ev = { name: strings.strings[name] } - if (address) { - ev.address = strings.strings[address] - ev.port = port - } else { - ev.host = strings.strings[host] - } - dnsEvents.push(ev) - } - } + const dnsEvents = await gatherNetworkTimelineEvents(cwd, 'profiler/dnstest.js', 'dns', 'DNS') assert.sameDeepMembers(dnsEvents, [ { name: 'lookup', host: 'example.org' }, { name: 'lookup', host: 'example.com' }, @@ -249,6 +257,49 @@ describe('profiler', () => { ]) }) + it('net timeline events work', async () => { + // Simple server that writes a constant message to the socket. + const msg = 'cya later!\n' + function createServer () { + const server = net.createServer((socket) => { + socket.end(msg, 'utf8') + }).on('error', (err) => { + throw err + }) + return server + } + // Create two instances of the server + const server1 = createServer() + try { + const server2 = createServer() + try { + // Have the servers listen on ephemeral ports + const p = new Promise(resolve => { + server1.listen(0, () => { + server2.listen(0, async () => { + resolve([server1.address().port, server2.address().port]) + }) + }) + }) + const [ port1, port2 ] = await p + const args = [String(port1), String(port2), msg] + // Invoke the profiled program, passing it the ports of the servers and + // the expected message. + const events = await gatherNetworkTimelineEvents(cwd, 'profiler/nettest.js', 'net', 'Net', args) + // The profiled program should have two TCP connection events to the two + // servers. 
+ assert.sameDeepMembers(events, [ + { name: 'connect', host: '127.0.0.1', port: port1 }, + { name: 'connect', host: '127.0.0.1', port: port2 } + ]) + } finally { + server2.close() + } + } finally { + server1.close() + } + }) + context('shutdown', () => { beforeEach(async () => { agent = await new FakeAgent().start() diff --git a/integration-tests/profiler/dnstest.js b/integration-tests/profiler/dnstest.js index 4af0f00750e..36398cb2a05 100644 --- a/integration-tests/profiler/dnstest.js +++ b/integration-tests/profiler/dnstest.js @@ -8,6 +8,3 @@ require('dd-trace').init().profilerStarted().then(() => { dns.resolve4('datadoghq.com', () => {}) dns.lookup('dfslkgsjkrtgrdg.com', () => {}) }) - -// Give the event processor chance to collect events -setTimeout(() => {}, 3000) diff --git a/integration-tests/profiler/nettest.js b/integration-tests/profiler/nettest.js new file mode 100644 index 00000000000..b98bc7d55f3 --- /dev/null +++ b/integration-tests/profiler/nettest.js @@ -0,0 +1,36 @@ +const net = require('node:net') +const process = require('node:process') + +async function streamToString (stream) { + const chunks = [] + + for await (const chunk of stream) { + chunks.push(Buffer.from(chunk)) + } + + return Buffer.concat(chunks).toString('utf8') +} + +const port1 = Number(process.argv[2]) +const port2 = Number(process.argv[3]) +const msg = process.argv[4] + +async function oneTimeConnect (hostSpec) { + return new Promise((resolve, reject) => { + const socket = net.createConnection(hostSpec, async () => { + const resp = await streamToString(socket) + if (resp !== msg) { + reject(new Error(`Expected response ${msg}, got ${resp} instead.`)) + } else { + resolve() + } + }) + }) +} + +require('dd-trace').init().profilerStarted() + .then(() => { + oneTimeConnect({ host: '127.0.0.1', port: port1 }) + }).then(() => { + oneTimeConnect({ host: '127.0.0.1', port: port2 }) + }) diff --git a/packages/dd-trace/src/profiling/profilers/events.js b/packages/dd-trace/src/profiling/profilers/events.js index 7c6caa267f9..eae5153a41e 100644 --- a/packages/dd-trace/src/profiling/profilers/events.js +++ b/packages/dd-trace/src/profiling/profilers/events.js @@ -165,7 +165,7 @@ class NetDecorator { addLabel(this.operationNameLabelKey, op) if (op === 'connect') { const detail = item.detail - addLabel(this.stringTable, this.hostLabelKey, detail.host) + addLabel(this.hostLabelKey, detail.host) labels.push(new Label({ key: this.portLabelKey, num: detail.port })) } labels.push(this.lanes.getLabelFor(item)) From c5838465e8ef0e351f73385e4149d54c3c4b0ed2 Mon Sep 17 00:00:00 2001 From: Ayan Khan Date: Thu, 14 Dec 2023 09:31:29 -0500 Subject: [PATCH 110/147] Fix failing Aerospike tests (#3873) * fix failing Aerospike tests on CI --- .github/workflows/plugins.yml | 56 +++++++++++-- .../test/index.spec.js | 79 ++++++++----------- 2 files changed, 84 insertions(+), 51 deletions(-) diff --git a/.github/workflows/plugins.yml b/.github/workflows/plugins.yml index 7fdf623df79..1bf016b94f9 100644 --- a/.github/workflows/plugins.yml +++ b/.github/workflows/plugins.yml @@ -26,7 +26,7 @@ jobs: image: ubuntu:18.04 services: aerospike: - image: aerospike:ce-6.4.0.3 + image: aerospike:ce-5.3.0.16 ports: - 3000:3000 testagent: @@ -80,17 +80,17 @@ jobs: run: yarn test:plugins:ci - if: env.MAJOR_VERSION == '3' uses: codecov/codecov-action@v2 - aerospike: + aerospike-4: runs-on: ubuntu-latest services: aerospike: - image: aerospike:ce-6.4.0.3 + image: aerospike:ce-5.7.0.15 ports: - "127.0.0.1:3000-3002:3000-3002" env: PLUGINS: aerospike 
SERVICES: aerospike - PACKAGE_VERSION_RANGE: '4.0.0 - 5.7.0' + PACKAGE_VERSION_RANGE: '4.0.0 - 5.4.0' steps: - uses: actions/checkout@v2 - uses: ./.github/actions/testagent/start @@ -98,12 +98,54 @@ jobs: - run: yarn install --ignore-engines - uses: ./.github/actions/node/oldest - run: yarn test:plugins:ci - - run: echo "PACKAGE_VERSION_RANGE=>=5.8.0" >> "$GITHUB_ENV" - - uses: ./.github/actions/node/20 # currently the latest version of aerospike only supports node 20 - - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - uses: codecov/codecov-action@v2 + aerospike-5: + runs-on: ubuntu-latest + services: + aerospike: + image: aerospike:ce-6.4.0.3 + ports: + - "127.0.0.1:3000-3002:3000-3002" + env: + PLUGINS: aerospike + SERVICES: aerospike + PACKAGE_VERSION_RANGE: '5.5.0 - 5.7.0' + steps: + - uses: actions/checkout@v2 + - uses: ./.github/actions/testagent/start + - uses: ./.github/actions/node/setup + - id: pkg + run: | + content=`cat ./package.json | tr '\n' ' '` + echo "::set-output name=json::$content" + - id: extract + run: | + version="${{fromJson(steps.pkg.outputs.json).version}}" + majorVersion=$(echo "$version" | cut -d '.' -f 1) + echo "Major Version: $majorVersion" + echo "MAJOR_VERSION=$majorVersion" >> $GITHUB_ENV + - name: Check package version + if: env.MAJOR_VERSION != '3' + run: | + echo "Package version is not 3. Proceeding with the next steps." + - if: env.MAJOR_VERSION != '3' + run: yarn install --ignore-engines + - if: env.MAJOR_VERSION != '3' + uses: ./.github/actions/node/oldest + - if: env.MAJOR_VERSION != '3' + run: yarn test:plugins:ci + - if: env.MAJOR_VERSION != '3' + run: echo "PACKAGE_VERSION_RANGE=>=5.8.0" >> "$GITHUB_ENV" + - if: env.MAJOR_VERSION != '3' + uses: ./.github/actions/node/20 # currently the latest version of aerospike only supports node 20 + - if: env.MAJOR_VERSION != '3' + run: yarn test:plugins:ci + - if: env.MAJOR_VERSION != '3' + uses: ./.github/actions/testagent/logs + - if: env.MAJOR_VERSION != '3' + uses: codecov/codecov-action@v2 amqp10: # TODO: move rhea to its own job runs-on: ubuntu-latest services: diff --git a/packages/datadog-plugin-aerospike/test/index.spec.js b/packages/datadog-plugin-aerospike/test/index.spec.js index 11202ef9cd4..6c301943245 100644 --- a/packages/datadog-plugin-aerospike/test/index.spec.js +++ b/packages/datadog-plugin-aerospike/test/index.spec.js @@ -1,10 +1,8 @@ 'use strict' const agent = require('../../dd-trace/test/plugins/agent') -const semver = require('semver') const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../../dd-trace/src/constants') const { expectedSchema, rawExpectedSchema } = require('./naming') -const { NODE_MAJOR } = require('../../../version') describe('Plugin', () => { let aerospike @@ -191,52 +189,45 @@ describe('Plugin', () => { }) }) - // skip query tests for node 16 and aerospike 4 because of an aerospike error that occurs when using query: - // AerospikeError: Sometimes our doc, or our customers' wishes, get ahead of us. - // We may have processed something that the server is not ready for (unsupported feature). 
- // this test works on node 14, so it is not a problem with the test but most likely a problem with the package - // version and aerospike server version mismatch which is really hard to pin down, since aerospike doesn't - // provide info on package version's compatibility with each server version - if (!(NODE_MAJOR === 16 && semver.intersects(version, '^4')) && !semver.intersects(version, '^3')) { - it('should instrument query', done => { - agent - .use(traces => { - const span = traces[0][0] - expect(span).to.have.property('name', expectedSchema.command.opName) - expect(span).to.have.property('service', expectedSchema.command.serviceName) - expect(span).to.have.property('resource', `Query`) - expect(span).to.have.property('type', 'aerospike') - expect(span.meta).to.have.property('span.kind', 'client') - expect(span.meta).to.have.property('aerospike.namespace', ns) - expect(span.meta).to.have.property('aerospike.setname', set) - expect(span.meta).to.have.property('component', 'aerospike') - }) - .then(done) - .catch(done) + it('should instrument query', done => { + agent + .use(traces => { + const span = traces[0][0] + expect(span).to.have.property('name', expectedSchema.command.opName) + expect(span).to.have.property('service', expectedSchema.command.serviceName) + expect(span).to.have.property('resource', `Query`) + expect(span).to.have.property('type', 'aerospike') + expect(span.meta).to.have.property('span.kind', 'client') + expect(span.meta).to.have.property('aerospike.namespace', ns) + expect(span.meta).to.have.property('aerospike.setname', set) + expect(span.meta).to.have.property('component', 'aerospike') + }) + .then(done) + .catch(done) - aerospike.connect(config).then(client => { - const index = { - ns: ns, - set: 'demo', - bin: 'tags', - index: 'tags_idx', - datatype: aerospike.indexDataType.STRING - } - client.createIndex(index, (error, job) => { - job.waitUntilDone((waitError) => { - const query = client.query(ns, 'demo') - const queryPolicy = { - totalTimeout: 10000 - } - query.select('id', 'tags') - query.where(aerospike.filter.contains('tags', 'green', aerospike.indexType.LIST)) - const stream = query.foreach(queryPolicy) - stream.on('end', () => { client.close(false) }) - }) + aerospike.connect(config).then(client => { + const index = { + ns: ns, + set: 'demo', + bin: 'tags', + index: 'tags_idx', + datatype: aerospike.indexDataType.STRING + } + client.createIndex(index, (error, job) => { + job.waitUntilDone((waitError) => { + const query = client.query(ns, 'demo') + const queryPolicy = { + totalTimeout: 10000 + } + query.select('id', 'tags') + query.where(aerospike.filter.contains('tags', 'green', aerospike.indexType.LIST)) + const stream = query.foreach(queryPolicy) + stream.on('end', () => { client.close(false) }) }) }) }) - } + }) + it('should run the callback in the parent context', done => { const obj = {} aerospike.connect(config).then(client => { From 66b4da9a9522a92855909f79a2a15b15b4f070a8 Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Thu, 14 Dec 2023 17:01:23 +0100 Subject: [PATCH 111/147] Only run DNS and Net timeline events integration tests on Node 16+ (#3879) --- integration-tests/profiler.spec.js | 97 +++++++++++++++--------------- 1 file changed, 50 insertions(+), 47 deletions(-) diff --git a/integration-tests/profiler.spec.js b/integration-tests/profiler.spec.js index 41ba92cd6a1..e192328a69e 100644 --- a/integration-tests/profiler.spec.js +++ b/integration-tests/profiler.spec.js @@ -13,6 +13,7 @@ const fsync = require('node:fs') const net = 
require('node:net') const zlib = require('node:zlib') const { Profile } = require('pprof-format') +const semver = require('semver') async function checkProfiles (agent, proc, timeout, expectedProfileTypes = ['wall', 'space'], expectBadExit = false, multiplicity = 1) { @@ -246,59 +247,61 @@ describe('profiler', () => { assert.equal(endpoints.size, 3) }) - it('dns timeline events work', async () => { - const dnsEvents = await gatherNetworkTimelineEvents(cwd, 'profiler/dnstest.js', 'dns', 'DNS') - assert.sameDeepMembers(dnsEvents, [ - { name: 'lookup', host: 'example.org' }, - { name: 'lookup', host: 'example.com' }, - { name: 'lookup', host: 'datadoghq.com' }, - { name: 'queryA', host: 'datadoghq.com' }, - { name: 'lookupService', address: '13.224.103.60', port: 80 } - ]) - }) + if (semver.gte(process.version, '16.0.0')) { + it('dns timeline events work', async () => { + const dnsEvents = await gatherNetworkTimelineEvents(cwd, 'profiler/dnstest.js', 'dns', 'DNS') + assert.sameDeepMembers(dnsEvents, [ + { name: 'lookup', host: 'example.org' }, + { name: 'lookup', host: 'example.com' }, + { name: 'lookup', host: 'datadoghq.com' }, + { name: 'queryA', host: 'datadoghq.com' }, + { name: 'lookupService', address: '13.224.103.60', port: 80 } + ]) + }) - it('net timeline events work', async () => { - // Simple server that writes a constant message to the socket. - const msg = 'cya later!\n' - function createServer () { - const server = net.createServer((socket) => { - socket.end(msg, 'utf8') - }).on('error', (err) => { - throw err - }) - return server - } - // Create two instances of the server - const server1 = createServer() - try { - const server2 = createServer() + it('net timeline events work', async () => { + // Simple server that writes a constant message to the socket. + const msg = 'cya later!\n' + function createServer () { + const server = net.createServer((socket) => { + socket.end(msg, 'utf8') + }).on('error', (err) => { + throw err + }) + return server + } + // Create two instances of the server + const server1 = createServer() try { - // Have the servers listen on ephemeral ports - const p = new Promise(resolve => { - server1.listen(0, () => { - server2.listen(0, async () => { - resolve([server1.address().port, server2.address().port]) + const server2 = createServer() + try { + // Have the servers listen on ephemeral ports + const p = new Promise(resolve => { + server1.listen(0, () => { + server2.listen(0, async () => { + resolve([server1.address().port, server2.address().port]) + }) }) }) - }) - const [ port1, port2 ] = await p - const args = [String(port1), String(port2), msg] - // Invoke the profiled program, passing it the ports of the servers and - // the expected message. - const events = await gatherNetworkTimelineEvents(cwd, 'profiler/nettest.js', 'net', 'Net', args) - // The profiled program should have two TCP connection events to the two - // servers. - assert.sameDeepMembers(events, [ - { name: 'connect', host: '127.0.0.1', port: port1 }, - { name: 'connect', host: '127.0.0.1', port: port2 } - ]) + const [ port1, port2 ] = await p + const args = [String(port1), String(port2), msg] + // Invoke the profiled program, passing it the ports of the servers and + // the expected message. + const events = await gatherNetworkTimelineEvents(cwd, 'profiler/nettest.js', 'net', 'Net', args) + // The profiled program should have two TCP connection events to the two + // servers. 
+ assert.sameDeepMembers(events, [ + { name: 'connect', host: '127.0.0.1', port: port1 }, + { name: 'connect', host: '127.0.0.1', port: port2 } + ]) + } finally { + server2.close() + } } finally { - server2.close() + server1.close() } - } finally { - server1.close() - } - }) + }) + } context('shutdown', () => { beforeEach(async () => { From 514c21eadc5c21fc5878ba5acb3fc8fb3232167f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Thu, 14 Dec 2023 17:03:29 +0100 Subject: [PATCH 112/147] Fix tedious and elasticsearch plugin tests (#3877) --- .github/workflows/plugins.yml | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/.github/workflows/plugins.yml b/.github/workflows/plugins.yml index 1bf016b94f9..3fb218c5c6b 100644 --- a/.github/workflows/plugins.yml +++ b/.github/workflows/plugins.yml @@ -85,7 +85,7 @@ jobs: services: aerospike: image: aerospike:ce-5.7.0.15 - ports: + ports: - "127.0.0.1:3000-3002:3000-3002" env: PLUGINS: aerospike @@ -106,7 +106,7 @@ jobs: services: aerospike: image: aerospike:ce-6.4.0.3 - ports: + ports: - "127.0.0.1:3000-3002:3000-3002" env: PLUGINS: aerospike @@ -434,8 +434,6 @@ jobs: - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install - - uses: ./.github/actions/node/oldest - - run: yarn test:plugins:ci - uses: ./.github/actions/node/latest - run: yarn test:plugins:ci - if: always() @@ -1211,9 +1209,6 @@ jobs: - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install - - uses: ./.github/actions/node/16 - - run: yarn test:plugins:ci - - run: yarn test:plugins:upstream - uses: ./.github/actions/node/latest - run: yarn test:plugins:ci - run: yarn test:plugins:upstream From fb923ac6798dc393440a7bba5bbb6a825a08347b Mon Sep 17 00:00:00 2001 From: Jordi Bertran de Balanda Date: Thu, 14 Dec 2023 17:04:33 +0100 Subject: [PATCH 113/147] DSM: add kafka offset lag (#3761) Add Kafka offset lag computation for DSM * trace consumer offset with upstream event emitter * trace producer offset with response data --------- Co-authored-by: Piotr Wolski --- docker-compose.yml | 11 +- .../datadog-instrumentations/src/kafkajs.js | 27 +++ .../datadog-plugin-kafkajs/src/consumer.js | 51 ++++++ .../datadog-plugin-kafkajs/src/producer.js | 55 ++++++ .../datadog-plugin-kafkajs/test/index.spec.js | 172 ++++++++++++++---- .../dd-trace/src/datastreams/processor.js | 119 ++++++++++-- packages/dd-trace/src/tracer.js | 4 + .../test/datastreams/encoding.spec.js | 1 - .../test/datastreams/processor.spec.js | 156 +++++++++++++--- 9 files changed, 521 insertions(+), 75 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index ed2cb8dfda9..6abe59c677d 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -119,7 +119,8 @@ services: - EDGE_PORT_HTTP=4567 - LAMBDA_EXECUTOR=local kafka: - image: debezium/kafka:1.7 + platform: linux/arm64 + image: wurstmeister/kafka:2.13-2.8.1 ports: - "127.0.0.1:9092:9092" - "127.0.0.1:9093:9093" @@ -127,9 +128,17 @@ services: - CLUSTER_ID=5Yr1SIgYQz-b-dgRabWx4g - NODE_ID=1 - CREATE_TOPICS="test-topic:1:1" + - KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181 - KAFKA_CONTROLLER_QUORUM_VOTERS=1@kafka:9093 + - KAFKA_LISTENERS=PLAINTEXT://0.0.0.0:9092 - KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://127.0.0.1:9092 - KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS=0 + depends_on: + - zookeeper + zookeeper: + image: wurstmeister/zookeeper + ports: + - "2181:2181" opensearch: image: opensearchproject/opensearch:2 environment: diff --git 
a/packages/datadog-instrumentations/src/kafkajs.js b/packages/datadog-instrumentations/src/kafkajs.js index beaba513097..132a9e592b9 100644 --- a/packages/datadog-instrumentations/src/kafkajs.js +++ b/packages/datadog-instrumentations/src/kafkajs.js @@ -8,13 +8,31 @@ const { const shimmer = require('../../datadog-shimmer') const producerStartCh = channel('apm:kafkajs:produce:start') +const producerCommitCh = channel('apm:kafkajs:produce:commit') const producerFinishCh = channel('apm:kafkajs:produce:finish') const producerErrorCh = channel('apm:kafkajs:produce:error') const consumerStartCh = channel('apm:kafkajs:consume:start') +const consumerCommitCh = channel('apm:kafkajs:consume:commit') const consumerFinishCh = channel('apm:kafkajs:consume:finish') const consumerErrorCh = channel('apm:kafkajs:consume:error') +function commitsFromEvent (event) { + const { payload: { groupId, topics } } = event + const commitList = [] + for (const { topic, partitions } of topics) { + for (const { partition, offset } of partitions) { + commitList.push({ + groupId, + partition, + offset, + topic + }) + } + } + consumerCommitCh.publish(commitList) +} + addHook({ name: 'kafkajs', file: 'src/index.js', versions: ['>=1.4'] }, (BaseKafka) => { class Kafka extends BaseKafka { constructor (options) { @@ -58,6 +76,12 @@ addHook({ name: 'kafkajs', file: 'src/index.js', versions: ['>=1.4'] }, (BaseKaf }) ) + result.then(res => { + if (producerCommitCh.hasSubscribers) { + producerCommitCh.publish(res) + } + }) + return result } catch (e) { producerErrorCh.publish(e) @@ -75,6 +99,9 @@ addHook({ name: 'kafkajs', file: 'src/index.js', versions: ['>=1.4'] }, (BaseKaf } const consumer = createConsumer.apply(this, arguments) + + consumer.on(consumer.events.COMMIT_OFFSETS, commitsFromEvent) + const run = consumer.run const groupId = arguments[0].groupId diff --git a/packages/datadog-plugin-kafkajs/src/consumer.js b/packages/datadog-plugin-kafkajs/src/consumer.js index c29cb389e10..9056531ce89 100644 --- a/packages/datadog-plugin-kafkajs/src/consumer.js +++ b/packages/datadog-plugin-kafkajs/src/consumer.js @@ -7,6 +7,57 @@ class KafkajsConsumerPlugin extends ConsumerPlugin { static get id () { return 'kafkajs' } static get operation () { return 'consume' } + constructor () { + super(...arguments) + this.addSub('apm:kafkajs:consume:commit', message => this.commit(message)) + } + + /** + * Transform individual commit details sent by kafkajs' event reporter + * into actionable backlog items for DSM + * + * @typedef {object} ConsumerBacklog + * @property {number} type + * @property {string} consumer_group + * @property {string} topic + * @property {number} partition + * @property {number} offset + * + * @typedef {object} CommitEventItem + * @property {string} groupId + * @property {string} topic + * @property {number} partition + * @property {import('kafkajs/utils/long').Long} offset + * + * @param {CommitEventItem} commit + * @returns {ConsumerBacklog} + */ + transformCommit (commit) { + const { groupId, partition, offset, topic } = commit + return { + partition, + topic, + type: 'kafka_commit', + offset: Number(offset), + consumer_group: groupId + } + } + + commit (commitList) { + if (!this.config.dsmEnabled) return + const keys = [ + 'consumer_group', + 'type', + 'partition', + 'offset', + 'topic' + ] + for (const commit of commitList.map(this.transformCommit)) { + if (keys.some(key => !commit.hasOwnProperty(key))) continue + this.tracer.setOffset(commit) + } + } + start ({ topic, partition, message, groupId }) { const childOf 
= extract(this.tracer, message.headers) const span = this.startSpan({ diff --git a/packages/datadog-plugin-kafkajs/src/producer.js b/packages/datadog-plugin-kafkajs/src/producer.js index a753021440c..6c38b2b5689 100644 --- a/packages/datadog-plugin-kafkajs/src/producer.js +++ b/packages/datadog-plugin-kafkajs/src/producer.js @@ -11,6 +11,61 @@ class KafkajsProducerPlugin extends ProducerPlugin { static get operation () { return 'produce' } static get peerServicePrecursors () { return [BOOTSTRAP_SERVERS_KEY] } + constructor () { + super(...arguments) + this.addSub('apm:kafkajs:produce:commit', message => this.commit(message)) + } + + /** + * Transform individual commit details sent by kafkajs' event reporter + * into actionable backlog items for DSM + * + * @typedef {object} ProducerBacklog + * @property {number} type + * @property {string} topic + * @property {number} partition + * @property {number} offset + * + * @typedef {object} ProducerResponseItem + * @property {string} topic + * @property {number} partition + * @property {import('kafkajs/utils/long').Long} [offset] + * @property {import('kafkajs/utils/long').Long} [baseOffset] + * + * @param {ProducerResponseItem} response + * @returns {ProducerBacklog} + */ + transformProduceResponse (response) { + // In produce protocol >=v3, the offset key changes from `offset` to `baseOffset` + const { topicName: topic, partition, offset, baseOffset } = response + const offsetAsLong = offset || baseOffset + return { + type: 'kafka_produce', + partition, + offset: offsetAsLong ? Number(offsetAsLong) : undefined, + topic + } + } + + /** + * + * @param {ProducerResponseItem[]} commitList + * @returns {void} + */ + commit (commitList) { + if (!this.config.dsmEnabled) return + const keys = [ + 'type', + 'partition', + 'offset', + 'topic' + ] + for (const commit of commitList.map(this.transformProduceResponse)) { + if (keys.some(key => !commit.hasOwnProperty(key))) continue + this.tracer.setOffset(commit) + } + } + start ({ topic, messages, bootstrapServers }) { let pathwayCtx const span = this.startSpan({ diff --git a/packages/datadog-plugin-kafkajs/test/index.spec.js b/packages/datadog-plugin-kafkajs/test/index.spec.js index a797f83b94d..50a7fc202fb 100644 --- a/packages/datadog-plugin-kafkajs/test/index.spec.js +++ b/packages/datadog-plugin-kafkajs/test/index.spec.js @@ -1,6 +1,7 @@ 'use strict' const { expect } = require('chai') +const semver = require('semver') const agent = require('../../dd-trace/test/plugins/agent') const { expectSomeSpan, withDefaults } = require('../../dd-trace/test/plugins/helpers') const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../../dd-trace/src/constants') @@ -40,11 +41,12 @@ describe('Plugin', () => { process.env['DD_DATA_STREAMS_ENABLED'] = 'true' tracer = require('../../dd-trace') await agent.load('kafkajs') - Kafka = require(`../../../versions/kafkajs@${version}`).get().Kafka - + const lib = require(`../../../versions/kafkajs@${version}`).get() + Kafka = lib.Kafka kafka = new Kafka({ clientId: `kafkajs-test-${version}`, - brokers: ['127.0.0.1:9092'] + brokers: ['127.0.0.1:9092'], + logLevel: lib.logLevel.WARN }) }) describe('producer', () => { @@ -113,7 +115,8 @@ describe('Plugin', () => { return expectedSpanPromise } }) - if (version !== '1.4.0') { + // Dynamic broker list support added in 1.14/2.0 (https://github.com/tulios/kafkajs/commit/62223) + if (semver.intersects(version, '>=1.14')) { it('should not extract bootstrap servers when initialized with a function', async () => { const 
expectedSpanPromise = agent.use(traces => { const span = traces[0][0] @@ -166,7 +169,6 @@ describe('Plugin', () => { eachMessage: () => {} }) await sendMessages(kafka, testTopic, messages) - return expectedSpanPromise }) @@ -272,6 +274,7 @@ describe('Plugin', () => { describe('data stream monitoring', () => { let consumer + beforeEach(async () => { tracer.init() tracer.use('kafkajs', { dsmEnabled: true }) @@ -284,48 +287,137 @@ describe('Plugin', () => { await consumer.disconnect() }) - it('Should set a checkpoint on produce', async () => { - const messages = [{ key: 'consumerDSM1', value: 'test2' }] - const setDataStreamsContextSpy = sinon.spy(DataStreamsContext, 'setDataStreamsContext') - await sendMessages(kafka, testTopic, messages) - expect(setDataStreamsContextSpy.args[0][0].hash).to.equal(expectedProducerHash) - setDataStreamsContextSpy.restore() - }) + describe('checkpoints', () => { + let setDataStreamsContextSpy - it('Should set a checkpoint on consume', async () => { - await sendMessages(kafka, testTopic, messages) - const setDataStreamsContextSpy = sinon.spy(DataStreamsContext, 'setDataStreamsContext') - await consumer.run({ - eachMessage: async ({ topic, partition, message, heartbeat, pause }) => { - expect(setDataStreamsContextSpy.args[0][0].hash).to.equal(expectedConsumerHash) + beforeEach(() => { + setDataStreamsContextSpy = sinon.spy(DataStreamsContext, 'setDataStreamsContext') + }) + + afterEach(() => { + setDataStreamsContextSpy.restore() + }) + + const expectedProducerHash = computePathwayHash( + 'test', + 'tester', + ['direction:out', 'topic:' + testTopic, 'type:kafka'], + ENTRY_PARENT_HASH + ) + const expectedConsumerHash = computePathwayHash( + 'test', + 'tester', + ['direction:in', 'group:test-group', 'topic:' + testTopic, 'type:kafka'], + expectedProducerHash + ) + + it('Should set a checkpoint on produce', async () => { + const messages = [{ key: 'consumerDSM1', value: 'test2' }] + await sendMessages(kafka, testTopic, messages) + expect(setDataStreamsContextSpy.args[0][0].hash).to.equal(expectedProducerHash) + }) + + it('Should set a checkpoint on consume', async () => { + const runArgs = [] + await consumer.run({ + eachMessage: async () => { + runArgs.push(setDataStreamsContextSpy.lastCall.args[0]) + } + }) + await sendMessages(kafka, testTopic, messages) + await consumer.disconnect() + for (const runArg of runArgs) { + expect(runArg.hash).to.equal(expectedConsumerHash) } }) - setDataStreamsContextSpy.restore() - }) - it('Should set a message payload size when producing a message', async () => { - const messages = [{ key: 'key1', value: 'test2' }] - if (DataStreamsProcessor.prototype.recordCheckpoint.isSinonProxy) { - DataStreamsProcessor.prototype.recordCheckpoint.restore() - } - const recordCheckpointSpy = sinon.spy(DataStreamsProcessor.prototype, 'recordCheckpoint') - await sendMessages(kafka, testTopic, messages) - expect(recordCheckpointSpy.args[0][0].hasOwnProperty('payloadSize')) - recordCheckpointSpy.restore() + it('Should set a message payload size when producing a message', async () => { + const messages = [{ key: 'key1', value: 'test2' }] + if (DataStreamsProcessor.prototype.recordCheckpoint.isSinonProxy) { + DataStreamsProcessor.prototype.recordCheckpoint.restore() + } + const recordCheckpointSpy = sinon.spy(DataStreamsProcessor.prototype, 'recordCheckpoint') + await sendMessages(kafka, testTopic, messages) + expect(recordCheckpointSpy.args[0][0].hasOwnProperty('payloadSize')) + recordCheckpointSpy.restore() + }) + + it('Should set a message payload 
size when consuming a message', async () => { + const messages = [{ key: 'key1', value: 'test2' }] + if (DataStreamsProcessor.prototype.recordCheckpoint.isSinonProxy) { + DataStreamsProcessor.prototype.recordCheckpoint.restore() + } + const recordCheckpointSpy = sinon.spy(DataStreamsProcessor.prototype, 'recordCheckpoint') + await sendMessages(kafka, testTopic, messages) + await consumer.run({ + eachMessage: async () => { + expect(recordCheckpointSpy.args[0][0].hasOwnProperty('payloadSize')) + recordCheckpointSpy.restore() + } + }) + }) }) - it('Should set a message payload size when consuming a message', async () => { - const messages = [{ key: 'key1', value: 'test2' }] - if (DataStreamsProcessor.prototype.recordCheckpoint.isSinonProxy) { - DataStreamsProcessor.prototype.recordCheckpoint.restore() + describe('backlogs', () => { + let setOffsetSpy + + beforeEach(() => { + setOffsetSpy = sinon.spy(tracer._tracer._dataStreamsProcessor, 'setOffset') + }) + + afterEach(() => { + setOffsetSpy.restore() + }) + + if (semver.intersects(version, '>=1.10')) { + it('Should add backlog on consumer explicit commit', async () => { + // Send a message, consume it, and record the last consumed offset + let commitMeta + await sendMessages(kafka, testTopic, messages) + await consumer.run({ + eachMessage: async payload => { + const { topic, partition, message } = payload + commitMeta = { + topic, + partition, + offset: Number(message.offset) + } + }, + autoCommit: false + }) + await new Promise(resolve => setTimeout(resolve, 50)) // Let eachMessage be called + await consumer.disconnect() // Flush ongoing `eachMessage` calls + for (const call of setOffsetSpy.getCalls()) { + expect(call.args[0]).to.not.have.property('type', 'kafka_commit') + } + + /** + * No choice but to reinitialize everything, because the only way to flush eachMessage + * calls is to disconnect. 
+ */ + consumer.connect() + await sendMessages(kafka, testTopic, messages) + await consumer.run({ eachMessage: async () => {}, autoCommit: false }) + setOffsetSpy.resetHistory() + await consumer.commitOffsets([commitMeta]) + await consumer.disconnect() + + // Check our work + const runArg = setOffsetSpy.lastCall.args[0] + expect(setOffsetSpy).to.be.calledOnce + expect(runArg).to.have.property('offset', commitMeta.offset) + expect(runArg).to.have.property('partition', commitMeta.partition) + expect(runArg).to.have.property('topic', commitMeta.topic) + expect(runArg).to.have.property('type', 'kafka_commit') + expect(runArg).to.have.property('consumer_group', 'test-group') + }) } - const recordCheckpointSpy = sinon.spy(DataStreamsProcessor.prototype, 'recordCheckpoint') - await sendMessages(kafka, testTopic, messages) - await consumer.run({ - eachMessage: async () => { - expect(recordCheckpointSpy.args[0][0].hasOwnProperty('payloadSize')) - recordCheckpointSpy.restore() - } + + it('Should add backlog on producer response', async () => { + await sendMessages(kafka, testTopic, messages) + expect(setOffsetSpy).to.be.calledOnce + const { topic } = setOffsetSpy.lastCall.args[0] + expect(topic).to.equal(testTopic) }) }) }) diff --git a/packages/dd-trace/src/datastreams/processor.js b/packages/dd-trace/src/datastreams/processor.js index 601d81441d8..3e78561ee04 100644 --- a/packages/dd-trace/src/datastreams/processor.js +++ b/packages/dd-trace/src/datastreams/processor.js @@ -45,14 +45,73 @@ class StatsPoint { } } -class StatsBucket extends Map { +class Backlog { + constructor ({ offset, ...tags }) { + this._tags = Object.keys(tags).sort().map(key => `${key}:${tags[key]}`) + this._hash = this._tags.join(',') + this._offset = offset + } + + get hash () { return this._hash } + + get offset () { return this._offset } + + get tags () { return this._tags } + + encode () { + return { + Tags: this.tags, + Value: this.offset + } + } +} + +class StatsBucket { + constructor () { + this._checkpoints = new Map() + this._backlogs = new Map() + } + + get checkpoints () { + return this._checkpoints + } + + get backlogs () { + return this._backlogs + } + forCheckpoint (checkpoint) { const key = checkpoint.hash - if (!this.has(key)) { - this.set(key, new StatsPoint(checkpoint.hash, checkpoint.parentHash, checkpoint.edgeTags)) // StatsPoint + if (!this._checkpoints.has(key)) { + this._checkpoints.set( + key, new StatsPoint(checkpoint.hash, checkpoint.parentHash, checkpoint.edgeTags) + ) } - return this.get(key) + return this._checkpoints.get(key) + } + + /** + * Conditionally add a backlog to the bucket. If there is currently an offset + * matching the backlog's tags, overwrite the offset IFF the backlog's offset + * is greater than the recorded offset. 
+ * + * @typedef {{[key: string]: string}} BacklogData + * @property {number} offset + * + * @param {BacklogData} backlogData + * @returns {Backlog} + */ + forBacklog (backlogData) { + const backlog = new Backlog(backlogData) + const existingBacklog = this._backlogs.get(backlog.hash) + if (existingBacklog !== undefined) { + if (existingBacklog.offset > backlog.offset) { + return existingBacklog + } + } + this._backlogs.set(backlog.hash, backlog) + return backlog } } @@ -122,12 +181,12 @@ class DataStreamsProcessor { } onInterval () { - const serialized = this._serializeBuckets() - if (!serialized) return + const { Stats } = this._serializeBuckets() + if (Stats.length === 0) return const payload = { Env: this.env, Service: this.service, - Stats: serialized, + Stats, TracerVersion: pkg.version, Version: this.version, Lang: 'javascript' @@ -135,10 +194,19 @@ class DataStreamsProcessor { this.writer.flush(payload) } + /** + * Given a timestamp in nanoseconds, compute and return the closest TimeBucket + * @param {number} timestamp + * @returns {StatsBucket} + */ + bucketFromTimestamp (timestamp) { + const bucketTime = Math.round(timestamp - (timestamp % this.bucketSizeNs)) + return this.buckets.forTime(bucketTime) + } + recordCheckpoint (checkpoint, span = null) { if (!this.enabled) return - const bucketTime = Math.round(checkpoint.currentTimestamp - (checkpoint.currentTimestamp % this.bucketSizeNs)) - this.buckets.forTime(bucketTime) + this.bucketFromTimestamp(checkpoint.currentTimestamp) .forCheckpoint(checkpoint) .addLatencies(checkpoint) // set DSM pathway hash on span to enable related traces feature on DSM tab, convert from buffer to uint64 @@ -207,26 +275,52 @@ class DataStreamsProcessor { return dataStreamsContext } + recordOffset ({ timestamp, ...backlogData }) { + if (!this.enabled) return + return this.bucketFromTimestamp(timestamp) + .forBacklog(backlogData) + } + + setOffset (offsetObj) { + if (!this.enabled) return + const nowNs = Date.now() * 1e6 + const backlogData = { + ...offsetObj, + timestamp: nowNs + } + this.recordOffset(backlogData) + } + _serializeBuckets () { + // TimeBuckets const serializedBuckets = [] for (const [ timeNs, bucket ] of this.buckets.entries()) { const points = [] - for (const stats of bucket.values()) { + // bucket: StatsBucket + // stats: StatsPoint + for (const stats of bucket._checkpoints.values()) { points.push(stats.encode()) } + const backlogs = [] + for (const backlog of bucket._backlogs.values()) { + backlogs.push(backlog.encode()) + } serializedBuckets.push({ Start: new Uint64(timeNs), Duration: new Uint64(this.bucketSizeNs), - Stats: points + Stats: points, + Backlogs: backlogs }) } this.buckets.clear() - return serializedBuckets + return { + Stats: serializedBuckets + } } } @@ -234,6 +328,7 @@ module.exports = { DataStreamsProcessor: DataStreamsProcessor, StatsPoint: StatsPoint, StatsBucket: StatsBucket, + Backlog, TimeBuckets, getMessageSize, getHeadersSize, diff --git a/packages/dd-trace/src/tracer.js b/packages/dd-trace/src/tracer.js index afa7da037b2..3626872be6b 100644 --- a/packages/dd-trace/src/tracer.js +++ b/packages/dd-trace/src/tracer.js @@ -46,6 +46,10 @@ class DatadogTracer extends Tracer { return ctx } + setOffset (offsetData) { + return this._dataStreamsProcessor.setOffset(offsetData) + } + trace (name, options, fn) { options = Object.assign({ childOf: this.scope().active() diff --git a/packages/dd-trace/test/datastreams/encoding.spec.js b/packages/dd-trace/test/datastreams/encoding.spec.js index ea7a78b17e3..548573e96ba 100644 
--- a/packages/dd-trace/test/datastreams/encoding.spec.js +++ b/packages/dd-trace/test/datastreams/encoding.spec.js @@ -1,7 +1,6 @@ 'use strict' require('../setup/tap') - const { encodeVarint, decodeVarint } = require('../../src/datastreams/encoding') const { expect } = require('chai') diff --git a/packages/dd-trace/test/datastreams/processor.spec.js b/packages/dd-trace/test/datastreams/processor.spec.js index 11425d039a1..fe2f31e0c36 100644 --- a/packages/dd-trace/test/datastreams/processor.spec.js +++ b/packages/dd-trace/test/datastreams/processor.spec.js @@ -23,6 +23,7 @@ const writer = { const DataStreamsWriter = sinon.stub().returns(writer) const { StatsPoint, + Backlog, StatsBucket, TimeBuckets, DataStreamsProcessor, @@ -75,29 +76,106 @@ describe('StatsPoint', () => { }) describe('StatsBucket', () => { - const buckets = new StatsBucket() + describe('Checkpoints', () => { + let buckets + beforeEach(() => { buckets = new StatsBucket() }) - it('should start empty', () => { - expect(buckets.size).to.equal(0) - }) + it('should start empty', () => { + expect(buckets.checkpoints.size).to.equal(0) + }) - it('should add a new entry when no matching key is found', () => { - const bucket = buckets.forCheckpoint(mockCheckpoint) - expect(bucket).to.be.an.instanceOf(StatsPoint) - expect(buckets.size).to.equal(1) - const [key, value] = Array.from(buckets.entries())[0] - expect(key.toString()).to.equal(mockCheckpoint.hash.toString()) - expect(value).to.be.instanceOf(StatsPoint) - }) + it('should add a new entry when no matching key is found', () => { + const bucket = buckets.forCheckpoint(mockCheckpoint) + const checkpoints = buckets.checkpoints + expect(bucket).to.be.an.instanceOf(StatsPoint) + expect(checkpoints.size).to.equal(1) + const [key, value] = Array.from(checkpoints.entries())[0] + expect(key.toString()).to.equal(mockCheckpoint.hash.toString()) + expect(value).to.be.instanceOf(StatsPoint) + }) - it('should not add a new entry if matching key is found', () => { - buckets.forCheckpoint(mockCheckpoint) - expect(buckets.size).to.equal(1) + it('should not add a new entry if matching key is found', () => { + buckets.forCheckpoint(mockCheckpoint) + buckets.forCheckpoint(mockCheckpoint) + expect(buckets.checkpoints.size).to.equal(1) + }) + + it('should add a new entry when new checkpoint does not match existing agg keys', () => { + buckets.forCheckpoint(mockCheckpoint) + buckets.forCheckpoint(anotherMockCheckpoint) + expect(buckets.checkpoints.size).to.equal(2) + }) }) - it('should add a new entry when new checkpoint does not match existing agg keys', () => { - buckets.forCheckpoint(anotherMockCheckpoint) - expect(buckets.size).to.equal(2) + describe('Backlogs', () => { + let backlogBuckets + const mockBacklog = { + offset: 12, + type: 'kafka_consume', + consumer_group: 'test-consumer', + partition: 0, + topic: 'test-topic' + } + + beforeEach(() => { + backlogBuckets = new StatsBucket() + }) + + it('should start empty', () => { + expect(backlogBuckets.backlogs.size).to.equal(0) + }) + + it('should add a new entry when empty', () => { + const bucket = backlogBuckets.forBacklog(mockBacklog) + const backlogs = backlogBuckets.backlogs + expect(bucket).to.be.an.instanceOf(Backlog) + const [, value] = Array.from(backlogs.entries())[0] + expect(value).to.be.instanceOf(Backlog) + }) + + it('should add a new entry when given different tags', () => { + const otherMockBacklog = { + offset: 1, + type: 'kafka_consume', + consumer_group: 'test-consumer', + partition: 1, + topic: 'test-topic' + } + + 
backlogBuckets.forBacklog(mockBacklog) + backlogBuckets.forBacklog(otherMockBacklog) + expect(backlogBuckets.backlogs.size).to.equal(2) + }) + + it('should update the existing entry if offset is higher', () => { + const higherMockBacklog = { + offset: 16, + type: 'kafka_consume', + consumer_group: 'test-consumer', + partition: 0, + topic: 'test-topic' + } + + backlogBuckets.forBacklog(mockBacklog) + const backlog = backlogBuckets.forBacklog(higherMockBacklog) + expect(backlog.offset).to.equal(higherMockBacklog.offset) + expect(backlogBuckets.backlogs.size).to.equal(1) + }) + + it('should discard the passed backlog if offset is lower', () => { + const lowerMockBacklog = { + offset: 2, + type: 'kafka_consume', + consumer_group: 'test-consumer', + partition: 0, + topic: 'test-topic' + } + + backlogBuckets.forBacklog(mockBacklog) + const backlog = backlogBuckets.forBacklog(lowerMockBacklog) + expect(backlog.offset).to.equal(mockBacklog.offset) + expect(backlogBuckets.backlogs.size).to.equal(1) + }) }) }) @@ -128,6 +206,11 @@ describe('DataStreamsProcessor', () => { tags: { tag: 'some tag' } } + beforeEach(() => { + processor = new DataStreamsProcessor(config) + clearTimeout(processor.timer) + }) + it('should construct', () => { processor = new DataStreamsProcessor(config) clearTimeout(processor.timer) @@ -144,6 +227,35 @@ describe('DataStreamsProcessor', () => { expect(processor.tags).to.deep.equal(config.tags) }) + it('should track backlogs', () => { + const mockBacklog = { + offset: 12, + type: 'kafka_consume', + consumer_group: 'test-consumer', + partition: 0, + topic: 'test-topic' + } + expect(processor.buckets.size).to.equal(0) + processor.recordOffset({ timestamp: DEFAULT_TIMESTAMP, ...mockBacklog }) + expect(processor.buckets.size).to.equal(1) + + const timeBucket = processor.buckets.values().next().value + expect(timeBucket).to.be.instanceOf(StatsBucket) + expect(timeBucket.backlogs.size).to.equal(1) + + const backlog = timeBucket.forBacklog(mockBacklog) + expect(timeBucket.backlogs.size).to.equal(1) + expect(backlog).to.be.instanceOf(Backlog) + + const encoded = backlog.encode() + expect(encoded).to.deep.equal({ + Tags: [ + 'consumer_group:test-consumer', 'partition:0', 'topic:test-topic', 'type:kafka_consume' + ], + Value: 12 + }) + }) + it('should track latency stats', () => { expect(processor.buckets.size).to.equal(0) processor.recordCheckpoint(mockCheckpoint) @@ -151,10 +263,10 @@ describe('DataStreamsProcessor', () => { const timeBucket = processor.buckets.values().next().value expect(timeBucket).to.be.instanceOf(StatsBucket) - expect(timeBucket.size).to.equal(1) + expect(timeBucket.checkpoints.size).to.equal(1) const checkpointBucket = timeBucket.forCheckpoint(mockCheckpoint) - expect(timeBucket.size).to.equal(1) + expect(timeBucket.checkpoints.size).to.equal(1) expect(checkpointBucket).to.be.instanceOf(StatsPoint) edgeLatency = new LogCollapsingLowestDenseDDSketch(0.00775) @@ -174,6 +286,7 @@ describe('DataStreamsProcessor', () => { }) it('should export on interval', () => { + processor.recordCheckpoint(mockCheckpoint) processor.onInterval() expect(writer.flush).to.be.calledWith({ Env: 'test', @@ -189,7 +302,8 @@ describe('DataStreamsProcessor', () => { EdgeLatency: edgeLatency.toProto(), PathwayLatency: pathwayLatency.toProto(), PayloadSize: payloadSize.toProto() - }] + }], + Backlogs: [] }], TracerVersion: pkg.version, Lang: 'javascript' From 6e0faf1124f9bb9aa0a5c1e2d1e6a20c0e628c34 Mon Sep 17 00:00:00 2001 From: Thomas Hunter II Date: Thu, 14 Dec 2023 08:45:14 -0800 Subject: 
[PATCH 114/147] ci: no longer test sharedb against node v14 (#3881) - seems no longer compatible as of v4.1.2 - https://github.com/share/sharedb/commit/06cc3877fd9b53d1ddc3ecdd3df11f68fbdbb100\#diff-3fb720f70ea34fb2975fd6c38ac4b6b6131373be2abfa4760dfaae8a25daaee2R11 - Uncaught TypeError: util.hasOwn is not a function --- .github/workflows/plugins.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/plugins.yml b/.github/workflows/plugins.yml index 3fb218c5c6b..fc69f6014d9 100644 --- a/.github/workflows/plugins.yml +++ b/.github/workflows/plugins.yml @@ -1182,8 +1182,6 @@ jobs: - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install - - uses: ./.github/actions/node/oldest - - run: yarn test:plugins:ci - uses: ./.github/actions/node/latest - run: yarn test:plugins:ci - if: always() From cb90e4ef6f7e3fe00b1b316c006029ca0aa40e3d Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Thu, 14 Dec 2023 17:58:04 +0100 Subject: [PATCH 115/147] PROF-8521: Remove lane logic from profiler library (#3880) --- integration-tests/profiler.spec.js | 12 ++---- .../src/profiling/profilers/events.js | 39 +------------------ 2 files changed, 5 insertions(+), 46 deletions(-) diff --git a/integration-tests/profiler.spec.js b/integration-tests/profiler.spec.js index e192328a69e..10903cad81f 100644 --- a/integration-tests/profiler.spec.js +++ b/integration-tests/profiler.spec.js @@ -66,7 +66,7 @@ async function getLatestProfile (cwd, pattern) { return Profile.decode(pprofUnzipped) } -async function gatherNetworkTimelineEvents (cwd, scriptFilePath, eventType, threadName, args) { +async function gatherNetworkTimelineEvents (cwd, scriptFilePath, eventType, args) { const procStart = BigInt(Date.now() * 1000000) const proc = fork(path.join(cwd, scriptFilePath), args, { cwd, @@ -89,13 +89,11 @@ async function gatherNetworkTimelineEvents (cwd, scriptFilePath, eventType, thre const hostKey = strings.dedup('host') const addressKey = strings.dedup('address') const portKey = strings.dedup('port') - const threadNameKey = strings.dedup('thread name') const nameKey = strings.dedup('operation') const eventValue = strings.dedup(eventType) const events = [] - const threadNamePrefix = `Main ${threadName}-` for (const sample of prof.sample) { - let ts, event, host, address, port, name, threadName + let ts, event, host, address, port, name for (const label of sample.label) { switch (label.key) { case tsKey: ts = label.num; break @@ -104,7 +102,6 @@ async function gatherNetworkTimelineEvents (cwd, scriptFilePath, eventType, thre case hostKey: host = label.str; break case addressKey: address = label.str; break case portKey: port = label.num; break - case threadNameKey: threadName = label.str; break default: assert.fail(`Unexpected label key ${label.key} ${strings.strings[label.key]}`) } } @@ -114,7 +111,6 @@ async function gatherNetworkTimelineEvents (cwd, scriptFilePath, eventType, thre assert.isTrue(ts >= procStart) // Gather only DNS events; ignore sporadic GC events if (event === eventValue) { - assert.isTrue(strings.strings[threadName].startsWith(threadNamePrefix)) assert.isDefined(name) // Exactly one of these is defined assert.isTrue(!!address !== !!host) @@ -249,7 +245,7 @@ describe('profiler', () => { if (semver.gte(process.version, '16.0.0')) { it('dns timeline events work', async () => { - const dnsEvents = await gatherNetworkTimelineEvents(cwd, 'profiler/dnstest.js', 'dns', 'DNS') + const dnsEvents = await gatherNetworkTimelineEvents(cwd, 'profiler/dnstest.js', 
'dns') assert.sameDeepMembers(dnsEvents, [ { name: 'lookup', host: 'example.org' }, { name: 'lookup', host: 'example.com' }, @@ -287,7 +283,7 @@ describe('profiler', () => { const args = [String(port1), String(port2), msg] // Invoke the profiled program, passing it the ports of the servers and // the expected message. - const events = await gatherNetworkTimelineEvents(cwd, 'profiler/nettest.js', 'net', 'Net', args) + const events = await gatherNetworkTimelineEvents(cwd, 'profiler/nettest.js', 'net', args) // The profiled program should have two TCP connection events to the two // servers. assert.sameDeepMembers(events, [ diff --git a/packages/dd-trace/src/profiling/profilers/events.js b/packages/dd-trace/src/profiling/profilers/events.js index eae5153a41e..3e1f4cd9a07 100644 --- a/packages/dd-trace/src/profiling/profilers/events.js +++ b/packages/dd-trace/src/profiling/profilers/events.js @@ -1,5 +1,5 @@ const { performance, constants, PerformanceObserver } = require('node:perf_hooks') -const { END_TIMESTAMP, THREAD_NAME, threadNamePrefix } = require('./shared') +const { END_TIMESTAMP } = require('./shared') const semver = require('semver') const { Function, Label, Line, Location, Profile, Sample, StringTable, ValueType } = require('pprof-format') const pprof = require('@datadog/pprof/') @@ -74,39 +74,6 @@ class GCDecorator { } } -// Maintains "lanes" (or virtual threads) to avoid overlaps in events. The -// decorator starts out with no lanes, and dynamically adds them as needed. -// Every event is put in the first lane where it doesn't overlap with the last -// event in that lane. If there's no lane without overlaps, a new lane is -// created. -class Lanes { - constructor (stringTable, name) { - this.stringTable = stringTable - this.name = name - this.lanes = [] - } - - getLabelFor (item) { - const startTime = item.startTime - const endTime = startTime + item.duration - - // Biases towards populating earlier lanes, but at least it's simple - for (const lane of this.lanes) { - if (lane.endTime <= startTime) { - lane.endTime = endTime - return lane.label - } - } - const label = labelFromStrStr( - this.stringTable, - THREAD_NAME, - `${this.name}-${this.lanes.length}` - ) - this.lanes.push({ endTime, label }) - return label - } -} - class DNSDecorator { constructor (stringTable) { this.stringTable = stringTable @@ -114,7 +81,6 @@ class DNSDecorator { this.hostLabelKey = stringTable.dedup('host') this.addressLabelKey = stringTable.dedup('address') this.portLabelKey = stringTable.dedup('port') - this.lanes = new Lanes(stringTable, `${threadNamePrefix} DNS`) } decorateSample (sampleInput, item) { @@ -142,7 +108,6 @@ class DNSDecorator { addLabel(this.hostLabelKey, detail.host) } } - labels.push(this.lanes.getLabelFor(item)) } } @@ -152,7 +117,6 @@ class NetDecorator { this.operationNameLabelKey = stringTable.dedup('operation') this.hostLabelKey = stringTable.dedup('host') this.portLabelKey = stringTable.dedup('port') - this.lanes = new Lanes(stringTable, `${threadNamePrefix} Net`) } decorateSample (sampleInput, item) { @@ -168,7 +132,6 @@ class NetDecorator { addLabel(this.hostLabelKey, detail.host) labels.push(new Label({ key: this.portLabelKey, num: detail.port })) } - labels.push(this.lanes.getLabelFor(item)) } } From d9dedc75c4912ab3c1ac5a0d144f9ce39ea3a255 Mon Sep 17 00:00:00 2001 From: Ida Liu <119438987+ida613@users.noreply.github.com> Date: Thu, 14 Dec 2023 12:03:20 -0500 Subject: [PATCH 116/147] quick telemetry test fix (#3882) --- packages/dd-trace/test/telemetry/index.spec.js | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/dd-trace/test/telemetry/index.spec.js b/packages/dd-trace/test/telemetry/index.spec.js index 51895d33e00..ea6f974d6d3 100644 --- a/packages/dd-trace/test/telemetry/index.spec.js +++ b/packages/dd-trace/test/telemetry/index.spec.js @@ -89,7 +89,7 @@ describe('telemetry', () => { return testSeq(1, 'app-started', payload => { expect(payload).to.have.property('products').that.deep.equal({ appsec: { enabled: true }, - profiler: { version: '5.0.0-pre', enabled: true } + profiler: { version: tracerVersion, enabled: true } }) expect(payload).to.have.property('configuration').that.deep.equal([ { name: 'telemetry.enabled', value: true, origin: 'unknown' }, From cf05c82f99c8f285af5fc40ffa506a4d6c68b376 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Thu, 14 Dec 2023 19:56:31 +0100 Subject: [PATCH 117/147] ignore pino error tests when node version is 21 (#3878) - specifically looking for exactly node 21 - this way we will have to fix it when node 22 LTS is released --------- Co-authored-by: Thomas Hunter II --- .github/workflows/plugins.yml | 2 ++ packages/datadog-plugin-pino/test/index.spec.js | 8 ++++++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/.github/workflows/plugins.yml b/.github/workflows/plugins.yml index fc69f6014d9..6ba2e693766 100644 --- a/.github/workflows/plugins.yml +++ b/.github/workflows/plugins.yml @@ -1030,6 +1030,8 @@ jobs: - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install + - uses: ./.github/actions/node/20 + - run: yarn test:plugins:ci - uses: ./.github/actions/node/latest - run: yarn test:plugins:ci # - run: yarn test:plugins:upstream diff --git a/packages/datadog-plugin-pino/test/index.spec.js b/packages/datadog-plugin-pino/test/index.spec.js index 77048a6d6ac..a11c7abf6df 100644 --- a/packages/datadog-plugin-pino/test/index.spec.js +++ b/packages/datadog-plugin-pino/test/index.spec.js @@ -3,6 +3,7 @@ const Writable = require('stream').Writable const agent = require('../../dd-trace/test/plugins/agent') const semver = require('semver') +const { NODE_MAJOR } = require('../../../version') describe('Plugin', () => { let logger @@ -131,8 +132,11 @@ describe('Plugin', () => { expect(record.err).to.have.property('stack', error.stack) } else { // pino <7 expect(record).to.have.property('msg', error.message) - expect(record).to.have.property('type', 'Error') - expect(record).to.have.property('stack', error.stack) + // ** TODO ** add this back once we fix it + if (NODE_MAJOR !== 21) { + expect(record).to.have.property('type', 'Error') + expect(record).to.have.property('stack', error.stack) + } } }) }) From f31172cf9d22d441e40bae4daea366b3eeca57fe Mon Sep 17 00:00:00 2001 From: Thomas Hunter II Date: Thu, 14 Dec 2023 14:49:57 -0800 Subject: [PATCH 118/147] fix rhea memory leak concerning inFlightDeliveries (#3833) Co-authored-by: Martin Disch --- packages/datadog-instrumentations/src/rhea.js | 24 ++++++---- .../datadog-plugin-rhea/test/index.spec.js | 45 +++++++++++++++++++ 2 files changed, 60 insertions(+), 9 deletions(-) diff --git a/packages/datadog-instrumentations/src/rhea.js b/packages/datadog-instrumentations/src/rhea.js index 955b72b67b2..61431dcbdfc 100644 --- a/packages/datadog-instrumentations/src/rhea.js +++ b/packages/datadog-instrumentations/src/rhea.js @@ -22,7 +22,7 @@ const dispatchReceiveCh = channel('apm:rhea:receive:dispatch') const errorReceiveCh = channel('apm:rhea:receive:error') const 
finishReceiveCh = channel('apm:rhea:receive:finish') -const contexts = new WeakMap() +const contexts = new WeakMap() // key: delivery Fn, val: context addHook({ name: 'rhea', versions: ['>=1'] }, rhea => { shimmer.wrap(rhea.message, 'encode', encode => function (msg) { @@ -52,7 +52,8 @@ addHook({ name: 'rhea', versions: ['>=1'], file: 'lib/link.js' }, obj => { startSendCh.publish({ targetAddress, host, port, msg }) const delivery = send.apply(this, arguments) const context = { - asyncResource + asyncResource, + connection: this.connection } contexts.set(delivery, context) @@ -80,7 +81,8 @@ addHook({ name: 'rhea', versions: ['>=1'], file: 'lib/link.js' }, obj => { if (msgObj.delivery) { const context = { - asyncResource + asyncResource, + connection: this.connection } contexts.set(msgObj.delivery, context) msgObj.delivery.update = wrapDeliveryUpdate(msgObj.delivery, msgObj.delivery.update) @@ -114,7 +116,7 @@ addHook({ name: 'rhea', versions: ['>=1'], file: 'lib/connection.js' }, Connecti asyncResource.runInAsyncScope(() => { errorReceiveCh.publish(error) - beforeFinish(delivery, null) + exports.beforeFinish(delivery, null) finishReceiveCh.publish() }) }) @@ -187,7 +189,7 @@ function patchCircularBuffer (proto, Session) { const state = remoteState && remoteState.constructor ? entry.remote_state.constructor.composite_type : undefined asyncResource.runInAsyncScope(() => { - beforeFinish(entry, state) + exports.beforeFinish(entry, state) finishSendCh.publish() }) } @@ -217,13 +219,13 @@ function addToInFlightDeliveries (connection, delivery) { } function beforeFinish (delivery, state) { - const obj = contexts.get(delivery) - if (obj) { + const context = contexts.get(delivery) + if (context) { if (state) { dispatchReceiveCh.publish({ state }) } - if (obj.connection && obj.connection[inFlightDeliveries]) { - obj.connection[inFlightDeliveries].delete(delivery) + if (context.connection && context.connection[inFlightDeliveries]) { + context.connection[inFlightDeliveries].delete(delivery) } } } @@ -238,3 +240,7 @@ function getStateFromData (stateData) { } } } + +module.exports.inFlightDeliveries = inFlightDeliveries +module.exports.beforeFinish = beforeFinish +module.exports.contexts = contexts diff --git a/packages/datadog-plugin-rhea/test/index.spec.js b/packages/datadog-plugin-rhea/test/index.spec.js index f55ead76fb8..93148c01703 100644 --- a/packages/datadog-plugin-rhea/test/index.spec.js +++ b/packages/datadog-plugin-rhea/test/index.spec.js @@ -206,6 +206,51 @@ describe('Plugin', () => { }) }) + describe('connection cleanup', () => { + let container + let context + let spy + let rheaInstumentation + + beforeEach(() => agent.reload('rhea')) + + beforeEach(done => { + rheaInstumentation = require('../../datadog-instrumentations/src/rhea') + spy = sinon.spy(rheaInstumentation, 'beforeFinish') + container = require(`../../../versions/rhea@${version}`).get() + + container.once('sendable', _context => { + context = _context + done() + }) + const connection = container.connect({ + username: 'admin', + password: 'admin', + host: 'localhost', + port: 5673 + }) + connection.open_sender('amq.topic') + connection.open_receiver('amq.topic') + }) + + it('should automatically instrument', (done) => { + agent.use(traces => { + const beforeFinishContext = rheaInstumentation.contexts.get(spy.firstCall.firstArg) + expect(spy).to.have.been.called + expect(beforeFinishContext).to.have.property('connection') + expect(beforeFinishContext.connection).to.have.property(rheaInstumentation.inFlightDeliveries) + 
expect(beforeFinishContext.connection[rheaInstumentation.inFlightDeliveries]).to.be.instanceof(Set) + expect(beforeFinishContext.connection[rheaInstumentation.inFlightDeliveries].size).to.equal(0) + }) + .then(done, done) + context.sender.send({ body: 'Hello World!' }) + }) + + afterEach(() => { + spy.restore() + }) + }) + describe('without broker', () => { let server let serverContext From c7148cf28324d81a2f4fa0bb6f7eddf5274fc6a6 Mon Sep 17 00:00:00 2001 From: Igor Unanua Date: Fri, 15 Dec 2023 10:51:12 +0100 Subject: [PATCH 119/147] Upgrade iast rewriter version to 2.2.2 (#3883) --- package.json | 2 +- yarn.lock | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/package.json b/package.json index 3e59b71a937..75beb5dc68b 100644 --- a/package.json +++ b/package.json @@ -69,7 +69,7 @@ }, "dependencies": { "@datadog/native-appsec": "5.0.0", - "@datadog/native-iast-rewriter": "2.2.1", + "@datadog/native-iast-rewriter": "2.2.2", "@datadog/native-iast-taint-tracking": "1.6.4", "@datadog/native-metrics": "^2.0.0", "@datadog/pprof": "4.1.0", diff --git a/yarn.lock b/yarn.lock index 895312d70b4..bd8743d5f0b 100644 --- a/yarn.lock +++ b/yarn.lock @@ -419,10 +419,10 @@ dependencies: node-gyp-build "^3.9.0" -"@datadog/native-iast-rewriter@2.2.1": - version "2.2.1" - resolved "https://registry.npmjs.org/@datadog/native-iast-rewriter/-/native-iast-rewriter-2.2.1.tgz" - integrity "sha1-PHTFqMqguHbgkenFqVJWrdDXPhw= sha512-DyZlE8gNa5AoOFNKGRJU4RYF/Y/tJzv4bIAMuVBbEnMA0xhiIYqpYQG8T3OKkALl3VSEeBMjYwuOR2fCrJ6gzA==" +"@datadog/native-iast-rewriter@2.2.2": + version "2.2.2" + resolved "https://registry.yarnpkg.com/@datadog/native-iast-rewriter/-/native-iast-rewriter-2.2.2.tgz#3f7feaf6be1af4c83ad063065b8ed509bbaf11cb" + integrity sha512-13ZBhJpjZ/tiV6rYfyAf/ITye9cyd3x12M/2NKhD4Ivev4N4uKBREAjpArOtzKtPXZ5b6oXwVV4ofT1SHoYyzA== dependencies: lru-cache "^7.14.0" node-gyp-build "^4.5.0" From ad5bad43c5a7560d56d239335e93bbe41e3b6a2d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Fri, 15 Dec 2023 15:29:27 +0100 Subject: [PATCH 120/147] [ci-visibility] Update repository url validation (#3876) --- .../src/plugins/util/user-provided-git.js | 5 +- .../exporters/git/git_metadata.spec.js | 33 ++-- .../plugins/util/ci-env/azurepipelines.json | 58 +++++++ .../test/plugins/util/ci-env/bitbucket.json | 40 +++++ .../test/plugins/util/ci-env/bitrise.json | 44 +++++ .../test/plugins/util/ci-env/buddy.json | 62 +++++++ .../test/plugins/util/ci-env/buildkite.json | 64 +++++++ .../test/plugins/util/ci-env/circleci.json | 48 ++++++ .../test/plugins/util/ci-env/codefresh.json | 162 ------------------ .../test/plugins/util/ci-env/gitlab.json | 82 +++++++++ .../test/plugins/util/ci-env/jenkins.json | 44 +++++ .../test/plugins/util/ci-env/teamcity.json | 112 ------------ .../plugins/util/ci-env/usersupplied.json | 50 ++++++ .../plugins/util/test-environment.spec.js | 3 +- .../dd-trace/test/plugins/util/test.spec.js | 6 +- 15 files changed, 518 insertions(+), 295 deletions(-) diff --git a/packages/dd-trace/src/plugins/util/user-provided-git.js b/packages/dd-trace/src/plugins/util/user-provided-git.js index 4a18a1c58be..b6389a778eb 100644 --- a/packages/dd-trace/src/plugins/util/user-provided-git.js +++ b/packages/dd-trace/src/plugins/util/user-provided-git.js @@ -27,10 +27,11 @@ function removeEmptyValues (tags) { }, {}) } -// The regex is extracted from +// The regex is inspired by // 
https://github.com/jonschlinkert/is-git-url/blob/396965ffabf2f46656c8af4c47bef1d69f09292e/index.js#L9C15-L9C87 +// The `.git` suffix is optional in this version function validateGitRepositoryUrl (repoUrl) { - return /(?:git|ssh|https?|git@[-\w.]+):(\/\/)?(.*?)(\.git)(\/?|#[-\d\w._]+?)$/.test(repoUrl) + return /(?:git|ssh|https?|git@[-\w.]+):(\/\/)?(.*?)(\/?|#[-\d\w._]+?)$/.test(repoUrl) } function validateGitCommitSha (gitCommitSha) { diff --git a/packages/dd-trace/test/ci-visibility/exporters/git/git_metadata.spec.js b/packages/dd-trace/test/ci-visibility/exporters/git/git_metadata.spec.js index d50e4a9cb66..db5cb2a70de 100644 --- a/packages/dd-trace/test/ci-visibility/exporters/git/git_metadata.spec.js +++ b/packages/dd-trace/test/ci-visibility/exporters/git/git_metadata.spec.js @@ -220,28 +220,29 @@ describe('git_metadata', () => { }) describe('validateGitRepositoryUrl', () => { it('should return false if Git repository URL is invalid', () => { - const invalidUrl1 = 'https://test.com' - const invalidUrl2 = 'https://test.com' - const invalidUrl3 = 'http://test.com/repo/dummy.4git' - const invalidUrl4 = 'https://test.com/repo/dummy.gi' - const invalidUrl5 = 'www.test.com/repo/dummy.git' - const invalidUrl6 = 'test.com/repo/dummy.git' - - const invalidUrls = [invalidUrl1, invalidUrl2, invalidUrl3, invalidUrl4, invalidUrl5, invalidUrl6] + const invalidUrls = [ + 'www.test.com/repo/dummy.git', + 'test.com/repo/dummy.git', + 'test.com/repo/dummy' + ] invalidUrls.forEach((invalidUrl) => { - expect(validateGitRepositoryUrl(invalidUrl)).to.be.false + expect(validateGitRepositoryUrl(invalidUrl), `${invalidUrl} is a valid URL`).to.be.false }) }) it('should return true if Git repository URL is valid', () => { - const validUrl1 = 'https://test.com/repo/dummy.git' - const validUrl2 = 'http://test.com/repo/dummy.git' - const validUrl3 = 'https://github.com/DataDog/dd-trace-js.git' - const validUrl4 = 'git@github.com:DataDog/dd-trace-js.git' - const validUrl5 = 'git@github.com:user/repo.git' + const validUrls = [ + 'https://test.com', + 'https://test.com/repo/dummy.git', + 'http://test.com/repo/dummy.git', + 'https://github.com/DataDog/dd-trace-js.git', + 'https://github.com/DataDog/dd-trace-js', + 'git@github.com:DataDog/dd-trace-js.git', + 'git@github.com:user/repo.git', + 'git@github.com:user/repo' + ] - const validUrls = [validUrl1, validUrl2, validUrl3, validUrl4, validUrl5] validUrls.forEach((validUrl) => { - expect(validateGitRepositoryUrl(validUrl)).to.be.true + expect(validateGitRepositoryUrl(validUrl), `${validUrl} is an invalid URL`).to.be.true }) }) }) diff --git a/packages/dd-trace/test/plugins/util/ci-env/azurepipelines.json b/packages/dd-trace/test/plugins/util/ci-env/azurepipelines.json index 594da6d147b..904e3fe9b26 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/azurepipelines.json +++ b/packages/dd-trace/test/plugins/util/ci-env/azurepipelines.json @@ -650,6 +650,64 @@ "git.tag": "0.0.2" } ], + [ + { + "BUILD_BUILDID": "azure-pipelines-build-id", + "BUILD_DEFINITIONNAME": "azure-pipelines-name", + "BUILD_REPOSITORY_URI": "https://dev.azure.com/fabrikamfiber/repo", + "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", + "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", + "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", + "DD_TEST_CASE_NAME": "http-repository-url-no-git-suffix", + "SYSTEM_JOBID": "azure-pipelines-job-id", + "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", + "SYSTEM_TEAMFOUNDATIONSERVERURI": 
"https://azure-pipelines-server-uri.com/", + "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", + "TF_BUILD": "True" + }, + { + "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.pipeline.id": "azure-pipelines-build-id", + "ci.pipeline.name": "azure-pipelines-name", + "ci.pipeline.number": "azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.provider.name": "azurepipelines", + "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", + "git.commit.author.name": "azure-pipelines-commit-author", + "git.commit.message": "azure-pipelines-commit-message", + "git.repository_url": "https://dev.azure.com/fabrikamfiber/repo" + } + ], + [ + { + "BUILD_BUILDID": "azure-pipelines-build-id", + "BUILD_DEFINITIONNAME": "azure-pipelines-name", + "BUILD_REPOSITORY_URI": "ssh://host.xz:port/path/to/repo/", + "BUILD_REQUESTEDFOREMAIL": "azure-pipelines-commit-author-email@datadoghq.com", + "BUILD_REQUESTEDFORID": "azure-pipelines-commit-author", + "BUILD_SOURCEVERSIONMESSAGE": "azure-pipelines-commit-message", + "DD_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix", + "SYSTEM_JOBID": "azure-pipelines-job-id", + "SYSTEM_TASKINSTANCEID": "azure-pipelines-task-id", + "SYSTEM_TEAMFOUNDATIONSERVERURI": "https://azure-pipelines-server-uri.com/", + "SYSTEM_TEAMPROJECTID": "azure-pipelines-project-id", + "TF_BUILD": "True" + }, + { + "_dd.ci.env_vars": "{\"SYSTEM_TEAMPROJECTID\":\"azure-pipelines-project-id\",\"BUILD_BUILDID\":\"azure-pipelines-build-id\",\"SYSTEM_JOBID\":\"azure-pipelines-job-id\"}", + "ci.job.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id&view=logs&j=azure-pipelines-job-id&t=azure-pipelines-task-id", + "ci.pipeline.id": "azure-pipelines-build-id", + "ci.pipeline.name": "azure-pipelines-name", + "ci.pipeline.number": "azure-pipelines-build-id", + "ci.pipeline.url": "https://azure-pipelines-server-uri.com/azure-pipelines-project-id/_build/results?buildId=azure-pipelines-build-id", + "ci.provider.name": "azurepipelines", + "git.commit.author.email": "azure-pipelines-commit-author-email@datadoghq.com", + "git.commit.author.name": "azure-pipelines-commit-author", + "git.commit.message": "azure-pipelines-commit-message", + "git.repository_url": "ssh://host.xz:port/path/to/repo/" + } + ], [ { "BUILD_BUILDID": "azure-pipelines-build-id", diff --git a/packages/dd-trace/test/plugins/util/ci-env/bitbucket.json b/packages/dd-trace/test/plugins/util/ci-env/bitbucket.json index 72d47cdff00..019621c8999 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/bitbucket.json +++ b/packages/dd-trace/test/plugins/util/ci-env/bitbucket.json @@ -400,6 +400,46 @@ "git.tag": "0.0.2" } ], + [ + { + "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "https://bitbucket.org/DataDog/dogweb", + "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", + "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo", + "DD_TEST_CASE_NAME": "http-repository-url-no-git-suffix" + }, + { + "ci.job.url": 
"https://bitbucket.org/bitbucket-repo/addon/pipelines/home#!/results/bitbucket-build-num", + "ci.pipeline.id": "bitbucket-uuid", + "ci.pipeline.name": "bitbucket-repo", + "ci.pipeline.number": "bitbucket-build-num", + "ci.pipeline.url": "https://bitbucket.org/bitbucket-repo/addon/pipelines/home#!/results/bitbucket-build-num", + "ci.provider.name": "bitbucket", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://bitbucket.org/DataDog/dogweb" + } + ], + [ + { + "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", + "BITBUCKET_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BITBUCKET_GIT_HTTP_ORIGIN": "ssh://host.xz:port/path/to/repo/", + "BITBUCKET_PIPELINE_UUID": "{bitbucket-uuid}", + "BITBUCKET_REPO_FULL_NAME": "bitbucket-repo", + "DD_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix" + }, + { + "ci.job.url": "https://bitbucket.org/bitbucket-repo/addon/pipelines/home#!/results/bitbucket-build-num", + "ci.pipeline.id": "bitbucket-uuid", + "ci.pipeline.name": "bitbucket-repo", + "ci.pipeline.number": "bitbucket-build-num", + "ci.pipeline.url": "https://bitbucket.org/bitbucket-repo/addon/pipelines/home#!/results/bitbucket-build-num", + "ci.provider.name": "bitbucket", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "ssh://host.xz:port/path/to/repo/" + } + ], [ { "BITBUCKET_BUILD_NUMBER": "bitbucket-build-num", diff --git a/packages/dd-trace/test/plugins/util/ci-env/bitrise.json b/packages/dd-trace/test/plugins/util/ci-env/bitrise.json index 6f5b52cdf90..73b753340cd 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/bitrise.json +++ b/packages/dd-trace/test/plugins/util/ci-env/bitrise.json @@ -479,6 +479,50 @@ "git.tag": "0.0.2" } ], + [ + { + "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", + "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", + "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", + "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", + "DD_TEST_CASE_NAME": "http-repository-url-no-git-suffix", + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_REPOSITORY_URL": "https://github.com/DataDog/dogweb" + }, + { + "ci.pipeline.id": "bitrise-pipeline-id", + "ci.pipeline.name": "bitrise-pipeline-name", + "ci.pipeline.number": "bitrise-pipeline-number", + "ci.pipeline.url": "https://bitrise-build-url.com//", + "ci.provider.name": "bitrise", + "git.commit.message": "bitrise-git-commit-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com/DataDog/dogweb" + } + ], + [ + { + "BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", + "BITRISE_BUILD_SLUG": "bitrise-pipeline-id", + "BITRISE_BUILD_URL": "https://bitrise-build-url.com//", + "BITRISE_GIT_MESSAGE": "bitrise-git-commit-message", + "BITRISE_TRIGGERED_WORKFLOW_ID": "bitrise-pipeline-name", + "DD_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix", + "GIT_CLONE_COMMIT_HASH": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_REPOSITORY_URL": "ssh://host.xz:port/path/to/repo/" + }, + { + "ci.pipeline.id": "bitrise-pipeline-id", + "ci.pipeline.name": "bitrise-pipeline-name", + "ci.pipeline.number": "bitrise-pipeline-number", + "ci.pipeline.url": "https://bitrise-build-url.com//", + "ci.provider.name": "bitrise", + "git.commit.message": "bitrise-git-commit-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "ssh://host.xz:port/path/to/repo/" + } + ], [ { 
"BITRISE_BUILD_NUMBER": "bitrise-pipeline-number", diff --git a/packages/dd-trace/test/plugins/util/ci-env/buddy.json b/packages/dd-trace/test/plugins/util/ci-env/buddy.json index 007cc196652..3a43ab27a6c 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/buddy.json +++ b/packages/dd-trace/test/plugins/util/ci-env/buddy.json @@ -178,6 +178,68 @@ "git.tag": "v1.0" } ], + [ + { + "BUDDY": "true", + "BUDDY_EXECUTION_BRANCH": "master", + "BUDDY_EXECUTION_ID": "buddy-execution-id", + "BUDDY_EXECUTION_REVISION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BUDDY_EXECUTION_REVISION_COMMITTER_EMAIL": "mikebenson@buddy.works", + "BUDDY_EXECUTION_REVISION_COMMITTER_NAME": "Mike Benson", + "BUDDY_EXECUTION_REVISION_MESSAGE": "Create buddy.yml", + "BUDDY_EXECUTION_TAG": "v1.0", + "BUDDY_EXECUTION_URL": "https://app.buddy.works/myworkspace/my-project/pipelines/pipeline/456/execution/5d9dc42c422f5a268b389d08", + "BUDDY_PIPELINE_ID": "456", + "BUDDY_PIPELINE_NAME": "Deploy to Production", + "BUDDY_SCM_URL": "https://github.com/buddyworks/my-project", + "DD_TEST_CASE_NAME": "http-repository-url-no-git-suffix" + }, + { + "ci.pipeline.id": "456/buddy-execution-id", + "ci.pipeline.name": "Deploy to Production", + "ci.pipeline.number": "buddy-execution-id", + "ci.pipeline.url": "https://app.buddy.works/myworkspace/my-project/pipelines/pipeline/456/execution/5d9dc42c422f5a268b389d08", + "ci.provider.name": "buddy", + "git.branch": "master", + "git.commit.committer.email": "mikebenson@buddy.works", + "git.commit.committer.name": "Mike Benson", + "git.commit.message": "Create buddy.yml", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com/buddyworks/my-project", + "git.tag": "v1.0" + } + ], + [ + { + "BUDDY": "true", + "BUDDY_EXECUTION_BRANCH": "master", + "BUDDY_EXECUTION_ID": "buddy-execution-id", + "BUDDY_EXECUTION_REVISION": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BUDDY_EXECUTION_REVISION_COMMITTER_EMAIL": "mikebenson@buddy.works", + "BUDDY_EXECUTION_REVISION_COMMITTER_NAME": "Mike Benson", + "BUDDY_EXECUTION_REVISION_MESSAGE": "Create buddy.yml", + "BUDDY_EXECUTION_TAG": "v1.0", + "BUDDY_EXECUTION_URL": "https://app.buddy.works/myworkspace/my-project/pipelines/pipeline/456/execution/5d9dc42c422f5a268b389d08", + "BUDDY_PIPELINE_ID": "456", + "BUDDY_PIPELINE_NAME": "Deploy to Production", + "BUDDY_SCM_URL": "ssh://host.xz:port/path/to/repo/", + "DD_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix" + }, + { + "ci.pipeline.id": "456/buddy-execution-id", + "ci.pipeline.name": "Deploy to Production", + "ci.pipeline.number": "buddy-execution-id", + "ci.pipeline.url": "https://app.buddy.works/myworkspace/my-project/pipelines/pipeline/456/execution/5d9dc42c422f5a268b389d08", + "ci.provider.name": "buddy", + "git.branch": "master", + "git.commit.committer.email": "mikebenson@buddy.works", + "git.commit.committer.name": "Mike Benson", + "git.commit.message": "Create buddy.yml", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "ssh://host.xz:port/path/to/repo/", + "git.tag": "v1.0" + } + ], [ { "BUDDY": "true", diff --git a/packages/dd-trace/test/plugins/util/ci-env/buildkite.json b/packages/dd-trace/test/plugins/util/ci-env/buildkite.json index 421904b20e6..b3bc32975e3 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/buildkite.json +++ b/packages/dd-trace/test/plugins/util/ci-env/buildkite.json @@ -673,6 +673,70 @@ "git.tag": "0.0.2" } ], + [ + { + "BUILDKITE": "true", + "BUILDKITE_BRANCH": "", + 
"BUILDKITE_BUILD_AUTHOR": "buildkite-git-commit-author-name", + "BUILDKITE_BUILD_AUTHOR_EMAIL": "buildkite-git-commit-author-email@datadoghq.com", + "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", + "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BUILDKITE_JOB_ID": "buildkite-job-id", + "BUILDKITE_MESSAGE": "buildkite-git-commit-message", + "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", + "BUILDKITE_REPO": "https://github.com/DataDog/dogweb", + "BUILDKITE_TAG": "", + "DD_TEST_CASE_NAME": "http-repository-url-no-git-suffix" + }, + { + "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", + "ci.pipeline.id": "buildkite-pipeline-id", + "ci.pipeline.name": "buildkite-pipeline-name", + "ci.pipeline.number": "buildkite-pipeline-number", + "ci.pipeline.url": "https://buildkite-build-url.com", + "ci.provider.name": "buildkite", + "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", + "git.commit.author.name": "buildkite-git-commit-author-name", + "git.commit.message": "buildkite-git-commit-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com/DataDog/dogweb" + } + ], + [ + { + "BUILDKITE": "true", + "BUILDKITE_BRANCH": "", + "BUILDKITE_BUILD_AUTHOR": "buildkite-git-commit-author-name", + "BUILDKITE_BUILD_AUTHOR_EMAIL": "buildkite-git-commit-author-email@datadoghq.com", + "BUILDKITE_BUILD_ID": "buildkite-pipeline-id", + "BUILDKITE_BUILD_NUMBER": "buildkite-pipeline-number", + "BUILDKITE_BUILD_URL": "https://buildkite-build-url.com", + "BUILDKITE_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "BUILDKITE_JOB_ID": "buildkite-job-id", + "BUILDKITE_MESSAGE": "buildkite-git-commit-message", + "BUILDKITE_PIPELINE_SLUG": "buildkite-pipeline-name", + "BUILDKITE_REPO": "ssh://host.xz:port/path/to/repo/", + "BUILDKITE_TAG": "", + "DD_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix" + }, + { + "_dd.ci.env_vars": "{\"BUILDKITE_BUILD_ID\":\"buildkite-pipeline-id\",\"BUILDKITE_JOB_ID\":\"buildkite-job-id\"}", + "ci.job.url": "https://buildkite-build-url.com#buildkite-job-id", + "ci.pipeline.id": "buildkite-pipeline-id", + "ci.pipeline.name": "buildkite-pipeline-name", + "ci.pipeline.number": "buildkite-pipeline-number", + "ci.pipeline.url": "https://buildkite-build-url.com", + "ci.provider.name": "buildkite", + "git.commit.author.email": "buildkite-git-commit-author-email@datadoghq.com", + "git.commit.author.name": "buildkite-git-commit-author-name", + "git.commit.message": "buildkite-git-commit-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "ssh://host.xz:port/path/to/repo/" + } + ], [ { "BUILDKITE": "true", diff --git a/packages/dd-trace/test/plugins/util/ci-env/circleci.json b/packages/dd-trace/test/plugins/util/ci-env/circleci.json index b9065be3bd6..ec61ea8205f 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/circleci.json +++ b/packages/dd-trace/test/plugins/util/ci-env/circleci.json @@ -519,6 +519,54 @@ "git.tag": "0.0.2" } ], + [ + { + "CIRCLECI": "circleCI", + "CIRCLE_BUILD_NUM": "circleci-pipeline-number", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", + "CIRCLE_JOB": "circleci-job-name", + "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", + "CIRCLE_REPOSITORY_URL": 
"https://github.com/DataDog/dogweb", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id", + "DD_TEST_CASE_NAME": "http-repository-url-no-git-suffix" + }, + { + "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", + "ci.job.name": "circleci-job-name", + "ci.job.url": "https://circleci-build-url.com/", + "ci.pipeline.id": "circleci-pipeline-id", + "ci.pipeline.name": "circleci-pipeline-name", + "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", + "ci.provider.name": "circleci", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com/DataDog/dogweb" + } + ], + [ + { + "CIRCLECI": "circleCI", + "CIRCLE_BUILD_NUM": "circleci-pipeline-number", + "CIRCLE_BUILD_URL": "https://circleci-build-url.com/", + "CIRCLE_JOB": "circleci-job-name", + "CIRCLE_PROJECT_REPONAME": "circleci-pipeline-name", + "CIRCLE_REPOSITORY_URL": "ssh://host.xz:port/path/to/repo/", + "CIRCLE_SHA1": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "CIRCLE_WORKFLOW_ID": "circleci-pipeline-id", + "DD_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix" + }, + { + "_dd.ci.env_vars": "{\"CIRCLE_WORKFLOW_ID\":\"circleci-pipeline-id\",\"CIRCLE_BUILD_NUM\":\"circleci-pipeline-number\"}", + "ci.job.name": "circleci-job-name", + "ci.job.url": "https://circleci-build-url.com/", + "ci.pipeline.id": "circleci-pipeline-id", + "ci.pipeline.name": "circleci-pipeline-name", + "ci.pipeline.url": "https://app.circleci.com/pipelines/workflows/circleci-pipeline-id", + "ci.provider.name": "circleci", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "ssh://host.xz:port/path/to/repo/" + } + ], [ { "CIRCLECI": "circleCI", diff --git a/packages/dd-trace/test/plugins/util/ci-env/codefresh.json b/packages/dd-trace/test/plugins/util/ci-env/codefresh.json index 7b1367b4f09..d719df10592 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/codefresh.json +++ b/packages/dd-trace/test/plugins/util/ci-env/codefresh.json @@ -158,167 +158,5 @@ "git.repository_url": "git@github.com:DataDog/userrepo.git", "git.tag": "0.0.2" } - ], - [ - { - "CF_BUILD_ID": "6410367cee516146a4c4c66e", - "CF_BUILD_URL": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", - "CF_PIPELINE_NAME": "My simple project/Example Java Project Pipeline", - "CF_STEP_NAME": "mah-job-name", - "DD_GIT_REPOSITORY_URL": "https://user:password@github.com/DataDog/dogweb.git" - }, - { - "_dd.ci.env_vars": "{\"CF_BUILD_ID\":\"6410367cee516146a4c4c66e\"}", - "ci.job.name": "mah-job-name", - "ci.pipeline.id": "6410367cee516146a4c4c66e", - "ci.pipeline.name": "My simple project/Example Java Project Pipeline", - "ci.pipeline.url": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", - "ci.provider.name": "codefresh", - "git.repository_url": "https://github.com/DataDog/dogweb.git" - } - ], - [ - { - "CF_BUILD_ID": "6410367cee516146a4c4c66e", - "CF_BUILD_URL": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", - "CF_PIPELINE_NAME": "My simple project/Example Java Project Pipeline", - "CF_STEP_NAME": "mah-job-name", - "DD_GIT_REPOSITORY_URL": "https://user@github.com/DataDog/dogweb.git" - }, - { - "_dd.ci.env_vars": "{\"CF_BUILD_ID\":\"6410367cee516146a4c4c66e\"}", - "ci.job.name": "mah-job-name", - "ci.pipeline.id": "6410367cee516146a4c4c66e", - "ci.pipeline.name": "My simple project/Example Java Project Pipeline", - "ci.pipeline.url": 
"https://g.codefresh.io/build/6410367cee516146a4c4c66e", - "ci.provider.name": "codefresh", - "git.repository_url": "https://github.com/DataDog/dogweb.git" - } - ], - [ - { - "CF_BUILD_ID": "6410367cee516146a4c4c66e", - "CF_BUILD_URL": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", - "CF_PIPELINE_NAME": "My simple project/Example Java Project Pipeline", - "CF_STEP_NAME": "mah-job-name", - "DD_GIT_REPOSITORY_URL": "https://user:password@github.com:1234/DataDog/dogweb.git" - }, - { - "_dd.ci.env_vars": "{\"CF_BUILD_ID\":\"6410367cee516146a4c4c66e\"}", - "ci.job.name": "mah-job-name", - "ci.pipeline.id": "6410367cee516146a4c4c66e", - "ci.pipeline.name": "My simple project/Example Java Project Pipeline", - "ci.pipeline.url": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", - "ci.provider.name": "codefresh", - "git.repository_url": "https://github.com:1234/DataDog/dogweb.git" - } - ], - [ - { - "CF_BUILD_ID": "6410367cee516146a4c4c66e", - "CF_BUILD_URL": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", - "CF_PIPELINE_NAME": "My simple project/Example Java Project Pipeline", - "CF_STEP_NAME": "mah-job-name", - "DD_GIT_REPOSITORY_URL": "https://user:password@1.1.1.1/DataDog/dogweb.git" - }, - { - "_dd.ci.env_vars": "{\"CF_BUILD_ID\":\"6410367cee516146a4c4c66e\"}", - "ci.job.name": "mah-job-name", - "ci.pipeline.id": "6410367cee516146a4c4c66e", - "ci.pipeline.name": "My simple project/Example Java Project Pipeline", - "ci.pipeline.url": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", - "ci.provider.name": "codefresh", - "git.repository_url": "https://1.1.1.1/DataDog/dogweb.git" - } - ], - [ - { - "CF_BUILD_ID": "6410367cee516146a4c4c66e", - "CF_BUILD_URL": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", - "CF_PIPELINE_NAME": "My simple project/Example Java Project Pipeline", - "CF_STEP_NAME": "mah-job-name", - "DD_GIT_REPOSITORY_URL": "https://user:password@1.1.1.1/DataDog/dogweb.git" - }, - { - "_dd.ci.env_vars": "{\"CF_BUILD_ID\":\"6410367cee516146a4c4c66e\"}", - "ci.job.name": "mah-job-name", - "ci.pipeline.id": "6410367cee516146a4c4c66e", - "ci.pipeline.name": "My simple project/Example Java Project Pipeline", - "ci.pipeline.url": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", - "ci.provider.name": "codefresh", - "git.repository_url": "https://1.1.1.1/DataDog/dogweb.git" - } - ], - [ - { - "CF_BUILD_ID": "6410367cee516146a4c4c66e", - "CF_BUILD_URL": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", - "CF_PIPELINE_NAME": "My simple project/Example Java Project Pipeline", - "CF_STEP_NAME": "mah-job-name", - "DD_GIT_REPOSITORY_URL": "https://user:password@1.1.1.1:1234/DataDog/dogweb.git" - }, - { - "_dd.ci.env_vars": "{\"CF_BUILD_ID\":\"6410367cee516146a4c4c66e\"}", - "ci.job.name": "mah-job-name", - "ci.pipeline.id": "6410367cee516146a4c4c66e", - "ci.pipeline.name": "My simple project/Example Java Project Pipeline", - "ci.pipeline.url": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", - "ci.provider.name": "codefresh", - "git.repository_url": "https://1.1.1.1:1234/DataDog/dogweb.git" - } - ], - [ - { - "CF_BUILD_ID": "6410367cee516146a4c4c66e", - "CF_BUILD_URL": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", - "CF_PIPELINE_NAME": "My simple project/Example Java Project Pipeline", - "CF_STEP_NAME": "mah-job-name", - "DD_GIT_REPOSITORY_URL": "https://user:password@1.1.1.1:1234/DataDog/dogweb_with_@_yeah.git" - }, - { - "_dd.ci.env_vars": "{\"CF_BUILD_ID\":\"6410367cee516146a4c4c66e\"}", - "ci.job.name": "mah-job-name", 
- "ci.pipeline.id": "6410367cee516146a4c4c66e", - "ci.pipeline.name": "My simple project/Example Java Project Pipeline", - "ci.pipeline.url": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", - "ci.provider.name": "codefresh", - "git.repository_url": "https://1.1.1.1:1234/DataDog/dogweb_with_@_yeah.git" - } - ], - [ - { - "CF_BUILD_ID": "6410367cee516146a4c4c66e", - "CF_BUILD_URL": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", - "CF_PIPELINE_NAME": "My simple project/Example Java Project Pipeline", - "CF_STEP_NAME": "mah-job-name", - "DD_GIT_REPOSITORY_URL": "ssh://user@host.xz:port/path/to/repo.git/" - }, - { - "_dd.ci.env_vars": "{\"CF_BUILD_ID\":\"6410367cee516146a4c4c66e\"}", - "ci.job.name": "mah-job-name", - "ci.pipeline.id": "6410367cee516146a4c4c66e", - "ci.pipeline.name": "My simple project/Example Java Project Pipeline", - "ci.pipeline.url": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", - "ci.provider.name": "codefresh", - "git.repository_url": "ssh://host.xz:port/path/to/repo.git/" - } - ], - [ - { - "CF_BUILD_ID": "6410367cee516146a4c4c66e", - "CF_BUILD_URL": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", - "CF_PIPELINE_NAME": "My simple project/Example Java Project Pipeline", - "CF_STEP_NAME": "mah-job-name", - "DD_GIT_REPOSITORY_URL": "ssh://user:password@host.xz:port/path/to/repo.git/" - }, - { - "_dd.ci.env_vars": "{\"CF_BUILD_ID\":\"6410367cee516146a4c4c66e\"}", - "ci.job.name": "mah-job-name", - "ci.pipeline.id": "6410367cee516146a4c4c66e", - "ci.pipeline.name": "My simple project/Example Java Project Pipeline", - "ci.pipeline.url": "https://g.codefresh.io/build/6410367cee516146a4c4c66e", - "ci.provider.name": "codefresh", - "git.repository_url": "ssh://host.xz:port/path/to/repo.git/" - } ] ] diff --git a/packages/dd-trace/test/plugins/util/ci-env/gitlab.json b/packages/dd-trace/test/plugins/util/ci-env/gitlab.json index 400d99c977d..7556df309e6 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/gitlab.json +++ b/packages/dd-trace/test/plugins/util/ci-env/gitlab.json @@ -451,6 +451,88 @@ "git.tag": "0.1.0" } ], + [ + { + "CI_COMMIT_AUTHOR": "John Doe ", + "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", + "CI_COMMIT_REF_NAME": "origin/master", + "CI_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", + "CI_JOB_ID": "gitlab-job-id", + "CI_JOB_NAME": "gitlab-job-name", + "CI_JOB_STAGE": "gitlab-stage-name", + "CI_JOB_URL": "https://gitlab.com/job", + "CI_PIPELINE_ID": "gitlab-pipeline-id", + "CI_PIPELINE_IID": "gitlab-pipeline-number", + "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", + "CI_PROJECT_DIR": "/foo/bar", + "CI_PROJECT_PATH": "gitlab-pipeline-name", + "CI_PROJECT_URL": "https://gitlab.com/repo", + "CI_REPOSITORY_URL": "http://hostname.com/repo", + "DD_TEST_CASE_NAME": "http-repository-url-no-git-suffix", + "GITLAB_CI": "gitlab" + }, + { + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://gitlab.com/repo\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", + "ci.job.name": "gitlab-job-name", + "ci.job.url": "https://gitlab.com/job", + "ci.pipeline.id": "gitlab-pipeline-id", + "ci.pipeline.name": "gitlab-pipeline-name", + "ci.pipeline.number": "gitlab-pipeline-number", + "ci.pipeline.url": "https://foo/repo/-/pipelines/1234", + "ci.provider.name": "gitlab", + "ci.stage.name": "gitlab-stage-name", + "ci.workspace_path": "/foo/bar", + "git.branch": "master", + "git.commit.author.date": "2021-07-21T11:43:07-04:00", + 
"git.commit.author.email": "john@doe.com", + "git.commit.author.name": "John Doe", + "git.commit.message": "gitlab-git-commit-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "http://hostname.com/repo" + } + ], + [ + { + "CI_COMMIT_AUTHOR": "John Doe ", + "CI_COMMIT_MESSAGE": "gitlab-git-commit-message", + "CI_COMMIT_REF_NAME": "origin/master", + "CI_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "CI_COMMIT_TIMESTAMP": "2021-07-21T11:43:07-04:00", + "CI_JOB_ID": "gitlab-job-id", + "CI_JOB_NAME": "gitlab-job-name", + "CI_JOB_STAGE": "gitlab-stage-name", + "CI_JOB_URL": "https://gitlab.com/job", + "CI_PIPELINE_ID": "gitlab-pipeline-id", + "CI_PIPELINE_IID": "gitlab-pipeline-number", + "CI_PIPELINE_URL": "https://foo/repo/-/pipelines/1234", + "CI_PROJECT_DIR": "/foo/bar", + "CI_PROJECT_PATH": "gitlab-pipeline-name", + "CI_PROJECT_URL": "https://gitlab.com/repo", + "CI_REPOSITORY_URL": "ssh://host.xz:port/path/to/repo/", + "DD_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix", + "GITLAB_CI": "gitlab" + }, + { + "_dd.ci.env_vars": "{\"CI_PROJECT_URL\":\"https://gitlab.com/repo\",\"CI_PIPELINE_ID\":\"gitlab-pipeline-id\",\"CI_JOB_ID\":\"gitlab-job-id\"}", + "ci.job.name": "gitlab-job-name", + "ci.job.url": "https://gitlab.com/job", + "ci.pipeline.id": "gitlab-pipeline-id", + "ci.pipeline.name": "gitlab-pipeline-name", + "ci.pipeline.number": "gitlab-pipeline-number", + "ci.pipeline.url": "https://foo/repo/-/pipelines/1234", + "ci.provider.name": "gitlab", + "ci.stage.name": "gitlab-stage-name", + "ci.workspace_path": "/foo/bar", + "git.branch": "master", + "git.commit.author.date": "2021-07-21T11:43:07-04:00", + "git.commit.author.email": "john@doe.com", + "git.commit.author.name": "John Doe", + "git.commit.message": "gitlab-git-commit-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "ssh://host.xz:port/path/to/repo/" + } + ], [ { "CI_COMMIT_AUTHOR": "John Doe ", diff --git a/packages/dd-trace/test/plugins/util/ci-env/jenkins.json b/packages/dd-trace/test/plugins/util/ci-env/jenkins.json index f87cdbd2a36..045c27270aa 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/jenkins.json +++ b/packages/dd-trace/test/plugins/util/ci-env/jenkins.json @@ -666,6 +666,50 @@ "git.tag": "0.0.2" } ], + [ + { + "BUILD_NUMBER": "jenkins-pipeline-number", + "BUILD_TAG": "jenkins-pipeline-id", + "BUILD_URL": "https://jenkins.com/pipeline", + "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", + "DD_TEST_CASE_NAME": "http-repository-url-no-git-suffix", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL_1": "https://github.com/DataDog/dogweb", + "JENKINS_URL": "jenkins", + "JOB_URL": "https://jenkins.com/job" + }, + { + "_dd.ci.env_vars": "{\"DD_CUSTOM_TRACE_ID\":\"jenkins-custom-trace-id\"}", + "ci.pipeline.id": "jenkins-pipeline-id", + "ci.pipeline.number": "jenkins-pipeline-number", + "ci.pipeline.url": "https://jenkins.com/pipeline", + "ci.provider.name": "jenkins", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com/DataDog/dogweb" + } + ], + [ + { + "BUILD_NUMBER": "jenkins-pipeline-number", + "BUILD_TAG": "jenkins-pipeline-id", + "BUILD_URL": "https://jenkins.com/pipeline", + "DD_CUSTOM_TRACE_ID": "jenkins-custom-trace-id", + "DD_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix", + "GIT_COMMIT": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "GIT_URL_1": "ssh://host.xz:port/path/to/repo/", + "JENKINS_URL": "jenkins", + 
"JOB_URL": "https://jenkins.com/job" + }, + { + "_dd.ci.env_vars": "{\"DD_CUSTOM_TRACE_ID\":\"jenkins-custom-trace-id\"}", + "ci.pipeline.id": "jenkins-pipeline-id", + "ci.pipeline.number": "jenkins-pipeline-number", + "ci.pipeline.url": "https://jenkins.com/pipeline", + "ci.provider.name": "jenkins", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "ssh://host.xz:port/path/to/repo/" + } + ], [ { "BUILD_NUMBER": "jenkins-pipeline-number", diff --git a/packages/dd-trace/test/plugins/util/ci-env/teamcity.json b/packages/dd-trace/test/plugins/util/ci-env/teamcity.json index 037887c4ae0..086c1c16de1 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/teamcity.json +++ b/packages/dd-trace/test/plugins/util/ci-env/teamcity.json @@ -74,117 +74,5 @@ "git.repository_url": "git@github.com:DataDog/userrepo.git", "git.tag": "0.0.2" } - ], - [ - { - "BUILD_URL": "https://teamcity.com/repo", - "DD_GIT_REPOSITORY_URL": "https://user:password@github.com/DataDog/dogweb.git", - "TEAMCITY_BUILDCONF_NAME": "Test 1", - "TEAMCITY_VERSION": "2022.10 (build 116751)" - }, - { - "ci.job.name": "Test 1", - "ci.job.url": "https://teamcity.com/repo", - "ci.provider.name": "teamcity", - "git.repository_url": "https://github.com/DataDog/dogweb.git" - } - ], - [ - { - "BUILD_URL": "https://teamcity.com/repo", - "DD_GIT_REPOSITORY_URL": "https://user@github.com/DataDog/dogweb.git", - "TEAMCITY_BUILDCONF_NAME": "Test 1", - "TEAMCITY_VERSION": "2022.10 (build 116751)" - }, - { - "ci.job.name": "Test 1", - "ci.job.url": "https://teamcity.com/repo", - "ci.provider.name": "teamcity", - "git.repository_url": "https://github.com/DataDog/dogweb.git" - } - ], - [ - { - "BUILD_URL": "https://teamcity.com/repo", - "DD_GIT_REPOSITORY_URL": "https://user:password@github.com:1234/DataDog/dogweb.git", - "TEAMCITY_BUILDCONF_NAME": "Test 1", - "TEAMCITY_VERSION": "2022.10 (build 116751)" - }, - { - "ci.job.name": "Test 1", - "ci.job.url": "https://teamcity.com/repo", - "ci.provider.name": "teamcity", - "git.repository_url": "https://github.com:1234/DataDog/dogweb.git" - } - ], - [ - { - "BUILD_URL": "https://teamcity.com/repo", - "DD_GIT_REPOSITORY_URL": "https://user:password@1.1.1.1/DataDog/dogweb.git", - "TEAMCITY_BUILDCONF_NAME": "Test 1", - "TEAMCITY_VERSION": "2022.10 (build 116751)" - }, - { - "ci.job.name": "Test 1", - "ci.job.url": "https://teamcity.com/repo", - "ci.provider.name": "teamcity", - "git.repository_url": "https://1.1.1.1/DataDog/dogweb.git" - } - ], - [ - { - "BUILD_URL": "https://teamcity.com/repo", - "DD_GIT_REPOSITORY_URL": "https://user:password@1.1.1.1:1234/DataDog/dogweb.git", - "TEAMCITY_BUILDCONF_NAME": "Test 1", - "TEAMCITY_VERSION": "2022.10 (build 116751)" - }, - { - "ci.job.name": "Test 1", - "ci.job.url": "https://teamcity.com/repo", - "ci.provider.name": "teamcity", - "git.repository_url": "https://1.1.1.1:1234/DataDog/dogweb.git" - } - ], - [ - { - "BUILD_URL": "https://teamcity.com/repo", - "DD_GIT_REPOSITORY_URL": "https://user:password@1.1.1.1:1234/DataDog/dogweb_with_@_yeah.git", - "TEAMCITY_BUILDCONF_NAME": "Test 1", - "TEAMCITY_VERSION": "2022.10 (build 116751)" - }, - { - "ci.job.name": "Test 1", - "ci.job.url": "https://teamcity.com/repo", - "ci.provider.name": "teamcity", - "git.repository_url": "https://1.1.1.1:1234/DataDog/dogweb_with_@_yeah.git" - } - ], - [ - { - "BUILD_URL": "https://teamcity.com/repo", - "DD_GIT_REPOSITORY_URL": "ssh://user@host.xz:port/path/to/repo.git/", - "TEAMCITY_BUILDCONF_NAME": "Test 1", - "TEAMCITY_VERSION": "2022.10 
(build 116751)" - }, - { - "ci.job.name": "Test 1", - "ci.job.url": "https://teamcity.com/repo", - "ci.provider.name": "teamcity", - "git.repository_url": "ssh://host.xz:port/path/to/repo.git/" - } - ], - [ - { - "BUILD_URL": "https://teamcity.com/repo", - "DD_GIT_REPOSITORY_URL": "ssh://user:password@host.xz:port/path/to/repo.git/", - "TEAMCITY_BUILDCONF_NAME": "Test 1", - "TEAMCITY_VERSION": "2022.10 (build 116751)" - }, - { - "ci.job.name": "Test 1", - "ci.job.url": "https://teamcity.com/repo", - "ci.provider.name": "teamcity", - "git.repository_url": "ssh://host.xz:port/path/to/repo.git/" - } ] ] diff --git a/packages/dd-trace/test/plugins/util/ci-env/usersupplied.json b/packages/dd-trace/test/plugins/util/ci-env/usersupplied.json index 464c4158558..9a151c6c00e 100644 --- a/packages/dd-trace/test/plugins/util/ci-env/usersupplied.json +++ b/packages/dd-trace/test/plugins/util/ci-env/usersupplied.json @@ -155,6 +155,56 @@ "git.tag": "0.0.2" } ], + [ + { + "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", + "DD_GIT_COMMIT_AUTHOR_EMAIL": "usersupplied-authoremail", + "DD_GIT_COMMIT_AUTHOR_NAME": "usersupplied-authorname", + "DD_GIT_COMMIT_COMMITTER_DATE": "usersupplied-comitterdate", + "DD_GIT_COMMIT_COMMITTER_EMAIL": "usersupplied-comitteremail", + "DD_GIT_COMMIT_COMMITTER_NAME": "usersupplied-comittername", + "DD_GIT_COMMIT_MESSAGE": "usersupplied-message", + "DD_GIT_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "DD_GIT_REPOSITORY_URL": "https://github.com/DataDog/dogweb", + "DD_TEST_CASE_NAME": "http-repository-url-no-git-suffix" + }, + { + "git.commit.author.date": "usersupplied-authordate", + "git.commit.author.email": "usersupplied-authoremail", + "git.commit.author.name": "usersupplied-authorname", + "git.commit.committer.date": "usersupplied-comitterdate", + "git.commit.committer.email": "usersupplied-comitteremail", + "git.commit.committer.name": "usersupplied-comittername", + "git.commit.message": "usersupplied-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "https://github.com/DataDog/dogweb" + } + ], + [ + { + "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", + "DD_GIT_COMMIT_AUTHOR_EMAIL": "usersupplied-authoremail", + "DD_GIT_COMMIT_AUTHOR_NAME": "usersupplied-authorname", + "DD_GIT_COMMIT_COMMITTER_DATE": "usersupplied-comitterdate", + "DD_GIT_COMMIT_COMMITTER_EMAIL": "usersupplied-comitteremail", + "DD_GIT_COMMIT_COMMITTER_NAME": "usersupplied-comittername", + "DD_GIT_COMMIT_MESSAGE": "usersupplied-message", + "DD_GIT_COMMIT_SHA": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "DD_GIT_REPOSITORY_URL": "ssh://host.xz:port/path/to/repo/", + "DD_TEST_CASE_NAME": "ssh-repository-url-no-git-suffix" + }, + { + "git.commit.author.date": "usersupplied-authordate", + "git.commit.author.email": "usersupplied-authoremail", + "git.commit.author.name": "usersupplied-authorname", + "git.commit.committer.date": "usersupplied-comitterdate", + "git.commit.committer.email": "usersupplied-comitteremail", + "git.commit.committer.name": "usersupplied-comittername", + "git.commit.message": "usersupplied-message", + "git.commit.sha": "b9f0fb3fdbb94c9d24b2c75b49663122a529e123", + "git.repository_url": "ssh://host.xz:port/path/to/repo/" + } + ], [ { "DD_GIT_COMMIT_AUTHOR_DATE": "usersupplied-authordate", diff --git a/packages/dd-trace/test/plugins/util/test-environment.spec.js b/packages/dd-trace/test/plugins/util/test-environment.spec.js index 4b7856be4bb..181c7f6fe52 100644 --- 
a/packages/dd-trace/test/plugins/util/test-environment.spec.js +++ b/packages/dd-trace/test/plugins/util/test-environment.spec.js @@ -39,6 +39,7 @@ describe('test environment data', () => { assertions.forEach(([env, expectedSpanTags], index) => { it(`reads env info for spec ${index} from ${ciProvider}`, () => { process.env = env + const { DD_TEST_CASE_NAME: testCaseName } = env const { [CI_ENV_VARS]: envVars, [CI_NODE_LABELS]: nodeLabels, ...restOfTags } = getTestEnvironmentMetadata() const { [CI_ENV_VARS]: expectedEnvVars, @@ -46,7 +47,7 @@ describe('test environment data', () => { ...restOfExpectedTags } = expectedSpanTags - expect(restOfTags).to.contain(restOfExpectedTags) + expect(restOfTags, testCaseName ? `${testCaseName} has failed.` : undefined).to.contain(restOfExpectedTags) // `CI_ENV_VARS` key contains a dictionary, so we do a `eql` comparison if (envVars && expectedEnvVars) { expect(JSON.parse(envVars)).to.eql(JSON.parse(expectedEnvVars)) diff --git a/packages/dd-trace/test/plugins/util/test.spec.js b/packages/dd-trace/test/plugins/util/test.spec.js index 4a992955397..a418af72305 100644 --- a/packages/dd-trace/test/plugins/util/test.spec.js +++ b/packages/dd-trace/test/plugins/util/test.spec.js @@ -177,7 +177,7 @@ describe('metadata validation', () => { [GIT_COMMIT_SHA]: 'abc123' } const invalidMetadata2 = { - [GIT_REPOSITORY_URL]: 'https://datadog.com/repo', + [GIT_REPOSITORY_URL]: 'htps://datadog.com/repo', [CI_PIPELINE_URL]: 'datadog.com', [GIT_COMMIT_SHA]: 'abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123abc123' } @@ -194,7 +194,9 @@ describe('metadata validation', () => { const invalidMetadata5 = { [GIT_REPOSITORY_URL]: '', [CI_PIPELINE_URL]: '', [GIT_COMMIT_SHA]: '' } const invalidMetadatas = [invalidMetadata1, invalidMetadata2, invalidMetadata3, invalidMetadata4, invalidMetadata5] invalidMetadatas.forEach((invalidMetadata) => { - expect(JSON.stringify(removeInvalidMetadata(invalidMetadata))).to.equal(JSON.stringify({})) + expect( + JSON.stringify(removeInvalidMetadata(invalidMetadata)), `${JSON.stringify(invalidMetadata)} is valid` + ).to.equal(JSON.stringify({})) }) }) it('should keep valid metadata', () => { From 5f886905a2cd2f6f3dc52fcef5c0c2c5076e4faf Mon Sep 17 00:00:00 2001 From: Thomas Hunter II Date: Mon, 18 Dec 2023 12:24:45 -0800 Subject: [PATCH 121/147] flush custom metrics before process exit (#3842) --- packages/dd-trace/src/proxy.js | 4 ++ packages/dd-trace/test/custom-metrics-app.js | 11 ++++ packages/dd-trace/test/custom-metrics.spec.js | 62 +++++++++++++++++++ 3 files changed, 77 insertions(+) create mode 100644 packages/dd-trace/test/custom-metrics-app.js create mode 100644 packages/dd-trace/test/custom-metrics.spec.js diff --git a/packages/dd-trace/src/proxy.js b/packages/dd-trace/src/proxy.js index 91be1fe9ad7..1cbdf582c7c 100644 --- a/packages/dd-trace/src/proxy.js +++ b/packages/dd-trace/src/proxy.js @@ -36,6 +36,10 @@ class Tracer extends NoopProxy { setInterval(() => { this.dogstatsd.flush() }, 10 * 1000).unref() + + process.once('beforeExit', () => { + this.dogstatsd.flush() + }) } if (config.spanLeakDebug > 0) { diff --git a/packages/dd-trace/test/custom-metrics-app.js b/packages/dd-trace/test/custom-metrics-app.js new file mode 100644 index 00000000000..c46f41f18b4 --- /dev/null +++ b/packages/dd-trace/test/custom-metrics-app.js @@ -0,0 +1,11 @@ +#!/usr/bin/env node + +/* eslint-disable no-console */ + +console.log('demo app started') + +const tracer = require('../../../').init() + 
+tracer.dogstatsd.increment('page.views.data') + +console.log('demo app finished') diff --git a/packages/dd-trace/test/custom-metrics.spec.js b/packages/dd-trace/test/custom-metrics.spec.js new file mode 100644 index 00000000000..49725be7e86 --- /dev/null +++ b/packages/dd-trace/test/custom-metrics.spec.js @@ -0,0 +1,62 @@ +'use strict' + +/* eslint-disable no-console */ + +require('./setup/tap') + +const http = require('http') +const path = require('path') +const os = require('os') +const { exec } = require('child_process') + +describe('Custom Metrics', () => { + let httpServer + let httpPort + let metricsData + let sockets + + beforeEach((done) => { + sockets = [] + httpServer = http.createServer((req, res) => { + let httpData = '' + req.on('data', d => { httpData += d.toString() }) + req.on('end', () => { + res.statusCode = 200 + res.end() + if (req.url === '/dogstatsd/v2/proxy') { + metricsData = httpData + } + }) + }).listen(0, () => { + httpPort = httpServer.address().port + if (os.platform() === 'win32') { + done() + return + } + done() + }) + httpServer.on('connection', socket => sockets.push(socket)) + }) + + afterEach(() => { + httpServer.close() + sockets.forEach(socket => socket.destroy()) + }) + + it('should send metrics before process exit', (done) => { + exec(`${process.execPath} ${path.join(__dirname, 'custom-metrics-app.js')}`, { + env: { + DD_TRACE_AGENT_URL: `http://127.0.0.1:${httpPort}` + } + }, (err, stdout, stderr) => { + if (err) return done(err) + if (stdout) console.log(stdout) + if (stderr) console.error(stderr) + + // eslint-disable-next-line no-undef + expect(metricsData.split('#')[0]).to.equal('page.views.data:1|c|') + + done() + }) + }) +}) From f25a26557cbace1f7363a89a5759d3dc0e3e4644 Mon Sep 17 00:00:00 2001 From: Laplie Anderson Date: Tue, 19 Dec 2023 09:55:24 -0500 Subject: [PATCH 122/147] Add variable to select downstream branch (#3890) --- .gitlab-ci.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 0fd16dae1cb..cb3e51c56b4 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -11,6 +11,9 @@ include: variables: JS_PACKAGE_VERSION: description: "Version to build for .deb and .rpm. 
Must be already published in NPM" + DOWNSTREAM_BRANCH: + value: "master" + description: "Run a specific datadog-reliability-env branch downstream" .common: &common tags: [ "runner:main", "size:large" ] @@ -50,6 +53,7 @@ deploy_to_reliability_env: allow_failure: true trigger: project: DataDog/apm-reliability/datadog-reliability-env + branch: $DOWNSTREAM_BRANCH variables: UPSTREAM_BRANCH: $CI_COMMIT_REF_NAME UPSTREAM_PROJECT_ID: $CI_PROJECT_ID From 89dad5d337e6b419b27f95859e809b44cb1f829c Mon Sep 17 00:00:00 2001 From: Sam Brenner <106700075+sabrenner@users.noreply.github.com> Date: Tue, 19 Dec 2023 14:47:59 -0500 Subject: [PATCH 123/147] [core] Default Error Tagging for Pages in Next.js (#3892) * default error tagging for next and pageLoad safeguarding --- packages/datadog-instrumentations/src/next.js | 20 +++++++--- packages/datadog-plugin-next/src/index.js | 38 ++++++++++++++++--- .../datadog-plugin-next/test/index.spec.js | 19 ++++++++++ 3 files changed, 66 insertions(+), 11 deletions(-) diff --git a/packages/datadog-instrumentations/src/next.js b/packages/datadog-instrumentations/src/next.js index b4406ba60b0..8c679724382 100644 --- a/packages/datadog-instrumentations/src/next.js +++ b/packages/datadog-instrumentations/src/next.js @@ -65,7 +65,7 @@ function wrapRenderToHTML (renderToHTML) { function wrapRenderErrorToHTML (renderErrorToHTML) { return function (err, req, res, pathname, query) { - return instrument(req, res, () => renderErrorToHTML.apply(this, arguments)) + return instrument(req, res, err, () => renderErrorToHTML.apply(this, arguments)) } } @@ -76,8 +76,8 @@ function wrapRenderToResponse (renderToResponse) { } function wrapRenderErrorToResponse (renderErrorToResponse) { - return function (ctx) { - return instrument(ctx.req, ctx.res, () => renderErrorToResponse.apply(this, arguments)) + return function (ctx, err) { + return instrument(ctx.req, ctx.res, err, () => renderErrorToResponse.apply(this, arguments)) } } @@ -111,13 +111,23 @@ function getPageFromPath (page, dynamicRoutes = []) { return getPagePath(page) } -function instrument (req, res, handler) { +function instrument (req, res, error, handler) { + if (typeof error === 'function') { + handler = error + error = null + } + req = req.originalRequest || req res = res.originalResponse || res // TODO support middleware properly in the future? 
const isMiddleware = req.headers[MIDDLEWARE_HEADER] - if (isMiddleware || requests.has(req)) return handler() + if (isMiddleware || requests.has(req)) { + if (error) { + errorChannel.publish({ error }) + } + return handler() + } requests.add(req) diff --git a/packages/datadog-plugin-next/src/index.js b/packages/datadog-plugin-next/src/index.js index 4bd1c21f984..5cb0fec304b 100644 --- a/packages/datadog-plugin-next/src/index.js +++ b/packages/datadog-plugin-next/src/index.js @@ -6,6 +6,8 @@ const analyticsSampler = require('../../dd-trace/src/analytics_sampler') const { COMPONENT } = require('../../dd-trace/src/constants') const web = require('../../dd-trace/src/plugins/util/web') +const errorPages = ['/404', '/500', '/_error', '/_not-found'] + class NextPlugin extends ServerPlugin { static get id () { return 'next' @@ -40,6 +42,13 @@ class NextPlugin extends ServerPlugin { } error ({ span, error }) { + if (!span) { + const store = storage.getStore() + if (!store) return + + span = store.span + } + this.addError(error, span) } @@ -50,10 +59,20 @@ class NextPlugin extends ServerPlugin { const span = store.span const error = span.context()._tags['error'] - - if (!this.config.validateStatus(res.statusCode) && !error) { - span.setTag('error', req.error || nextRequest.error || true) - web.addError(req, req.error || nextRequest.error || true) + const requestError = req.error || nextRequest.error + + if (requestError) { + // prioritize user-set errors from API routes + span.setTag('error', requestError) + web.addError(req, requestError) + } else if (error) { + // general error handling + span.setTag('error', error) + web.addError(req, requestError || error) + } else if (!this.config.validateStatus(res.statusCode)) { + // where there's no error, we still need to validate status + span.setTag('error', true) + web.addError(req, true) } span.addTags({ @@ -73,14 +92,21 @@ class NextPlugin extends ServerPlugin { const span = store.span const req = this._requests.get(span) + // safeguard against missing req in complicated timeout scenarios + if (!req) return + // Only use error page names if there's not already a name const current = span.context()._tags['next.page'] - if (current && ['/404', '/500', '/_error', '/_not-found'].includes(page)) { + const isErrorPage = errorPages.includes(page) + + if (current && isErrorPage) { return } // remove ending /route or /page for appDir projects - if (isAppPath) page = page.substring(0, page.lastIndexOf('/')) + // need to check if not an error page too, as those are marked as app directory + // in newer versions + if (isAppPath && !isErrorPage) page = page.substring(0, page.lastIndexOf('/')) // handle static resource if (isStatic) { diff --git a/packages/datadog-plugin-next/test/index.spec.js b/packages/datadog-plugin-next/test/index.spec.js index d03668bcb2a..d12fb50bdd2 100644 --- a/packages/datadog-plugin-next/test/index.spec.js +++ b/packages/datadog-plugin-next/test/index.spec.js @@ -344,6 +344,25 @@ describe('Plugin', function () { .get(`http://127.0.0.1:${port}/hello/world`) .catch(done) }) + + it('should attach errors by default', done => { + agent + .use(traces => { + const spans = traces[0] + + expect(spans[1]).to.have.property('name', 'next.request') + expect(spans[1]).to.have.property('error', 1) + + expect(spans[1].meta).to.have.property('http.status_code', '500') + expect(spans[1].meta).to.have.property('error.message', 'fail') + expect(spans[1].meta).to.have.property('error.type', 'Error') + expect(spans[1].meta['error.stack']).to.exist + }) + 
.then(done) + .catch(done) + + axios.get(`http://127.0.0.1:${port}/error/get_server_side_props`) + }) }) describe('for static files', () => { From 96349a89d5ade9f92a85d1bc2b6ce4c26a3aad17 Mon Sep 17 00:00:00 2001 From: Roberto Montero <108007532+robertomonteromiguel@users.noreply.github.com> Date: Wed, 20 Dec 2023 09:36:10 +0100 Subject: [PATCH 124/147] =?UTF-8?q?Configure=20parametric=20test=20to=20ge?= =?UTF-8?q?t=20the=20nodejs=20tracer=20in=20same=20way=20as=20sys=E2=80=A6?= =?UTF-8?q?=20(#3889)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Configure parametric test to get the nodejs tracer in same way as system-tests --- .github/workflows/system-tests.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/system-tests.yml b/.github/workflows/system-tests.yml index b37230370c2..30503e8452b 100644 --- a/.github/workflows/system-tests.yml +++ b/.github/workflows/system-tests.yml @@ -62,6 +62,7 @@ jobs: runs-on: ubuntu-latest env: TEST_LIBRARY: nodejs + #keep this line until system-tests -> robertomonteromiguel/parametric_use_load_binary_nodejs merged NODEJS_DDTRACE_MODULE: datadog/dd-trace-js#${{ github.sha }} steps: - name: Checkout system tests @@ -71,6 +72,10 @@ jobs: - uses: actions/setup-python@v4 with: python-version: '3.9' + - name: Checkout dd-trace-js + uses: actions/checkout@v3 + with: + path: 'binaries/dd-trace-js' - name: Build run: ./build.sh -i runner - name: Run From de52a53534b52bc6e8516a2f1e1db2e3ca014cf3 Mon Sep 17 00:00:00 2001 From: Julio Gonzalez <107922352+hoolioh@users.noreply.github.com> Date: Wed, 20 Dec 2023 10:43:32 +0100 Subject: [PATCH 125/147] GraphQL Blocking (#3819) * Upload module skeleton. * Blocking in apollo, very very first version * Move graphql implementation to another module. * Blocking for apollo-server-core, ugly but it works, lets find a better way * Use real blocking data * Set blocking to true. * Throw before resolver execution in order to stop the operation's execution flow. * Use HttpQueryError in apollo-server-core * Blocking test in apollo-server-fastify * Refactor graphql blocking. * Remove previous implementation which only supported monitoring. * Add new waf address in order to check the payload of every resolver. * Use apm start resolver address instead of a new one. * Remove mock and perform an actual call to the waf. * Add non blocking graphql test * Move abortController constructor to context creation. This reduces the performance overhead due to just one instance is shared across the whole query exectution. * Add pollo-server-express block tests * Add unit tests. * Add @apollo/server tests * Update test rules for blocking by `graphql.server.resolver` * Block with graphql templates data * Add tests. * Block with graphql data in graphql endpoint * Fix tests. * Execute @apollo/server and apollo-server-express tests * Unify code in @apollo/server and apollo-server-core * update comments * Add appsec.blocked tag in blocked requests * Add test with non graphql block response * Tests for block with redirect * Prevent creation of resolve span when it is blocked before the execution of the resolve code * Refactor addResolver in order to get directives information. * Add tests to block on directives. * Add test for directives. * Undo prevent creating resolve span * Configurable graphql blocking json * Refactor graphql * Using resolver instead of resolvers. * Change graphql channel name to be consistent with the others. 
* Small changes in blocking * Move resover information resolution to plugin. * Revert "Move resover information resolution to plugin." This reverts commit 7cc8561bc567a385088959bcf65deaccdb5f0ec0. * Remove resolver information from context, pass it in a different field instead. * Throw custom exception rather than send an empty array. * Update packages/datadog-instrumentations/src/graphql.js Co-authored-by: Ugaitz Urien * Change a bit apollo-server-core instrumentation * Protect Header map, if in future version it is moved/removed, prevent breaks * Remove some duplicated code * Update packages/datadog-instrumentations/src/apollo-server.js Co-authored-by: Carles Capell <107924659+CarlesDD@users.noreply.github.com> * Fix comments in the PR * Fix PR comments. * Fix some comments in the PR * Move resolver information formatting to the plugin. * Fix PR comments. * Fix proper use of Promise.race. --------- Co-authored-by: Ugaitz Urien Co-authored-by: Carles Capell <107924659+CarlesDD@users.noreply.github.com> --- .github/workflows/appsec.yml | 14 ++ docs/test.ts | 1 + index.d.ts | 5 + integration-tests/graphql.spec.js | 5 +- integration-tests/graphql/graphql-rules.json | 5 +- .../src/apollo-server-core.js | 41 +++ .../src/apollo-server.js | 83 ++++++ .../datadog-instrumentations/src/graphql.js | 22 +- .../src/helpers/hooks.js | 2 + .../datadog-plugin-graphql/src/resolve.js | 44 ++-- packages/dd-trace/src/appsec/addresses.js | 1 + .../dd-trace/src/appsec/blocked_templates.js | 5 +- packages/dd-trace/src/appsec/blocking.js | 138 ++++++---- packages/dd-trace/src/appsec/channels.js | 5 +- packages/dd-trace/src/appsec/graphql.js | 146 +++++++++++ packages/dd-trace/src/appsec/index.js | 20 +- packages/dd-trace/src/config.js | 5 + .../dd-trace/test/appsec/blocking.spec.js | 40 +-- .../dd-trace/test/appsec/graphq.test-utils.js | 228 +++++++++++++++++ .../test/appsec/graphql-rules-redirect.json | 49 ++++ .../dd-trace/test/appsec/graphql-rules.json | 6 + ...aphql.apollo-server-express.plugin.spec.js | 60 +++++ ...aphql.apollo-server-fastify.plugin.spec.js | 60 +++++ .../graphql.apollo-server.plugin.spec.js | 44 ++++ .../dd-trace/test/appsec/graphql.block.json | 7 + packages/dd-trace/test/appsec/graphql.spec.js | 237 ++++++++++++++++++ packages/dd-trace/test/appsec/index.spec.js | 79 +----- packages/dd-trace/test/config.spec.js | 22 +- .../appsec-blocked-graphql-template.json | 5 + packages/dd-trace/test/plugins/externals.json | 36 +++ 30 files changed, 1238 insertions(+), 177 deletions(-) create mode 100644 packages/datadog-instrumentations/src/apollo-server-core.js create mode 100644 packages/datadog-instrumentations/src/apollo-server.js create mode 100644 packages/dd-trace/src/appsec/graphql.js create mode 100644 packages/dd-trace/test/appsec/graphq.test-utils.js create mode 100644 packages/dd-trace/test/appsec/graphql-rules-redirect.json create mode 100644 packages/dd-trace/test/appsec/graphql.apollo-server-express.plugin.spec.js create mode 100644 packages/dd-trace/test/appsec/graphql.apollo-server-fastify.plugin.spec.js create mode 100644 packages/dd-trace/test/appsec/graphql.apollo-server.plugin.spec.js create mode 100644 packages/dd-trace/test/appsec/graphql.block.json create mode 100644 packages/dd-trace/test/appsec/graphql.spec.js create mode 100644 packages/dd-trace/test/fixtures/config/appsec-blocked-graphql-template.json diff --git a/.github/workflows/appsec.yml b/.github/workflows/appsec.yml index f37acbe97bc..a0e22b28ff8 100644 --- a/.github/workflows/appsec.yml +++ 
b/.github/workflows/appsec.yml @@ -135,6 +135,20 @@ jobs: - run: yarn test:appsec:plugins:ci - uses: codecov/codecov-action@v2 + graphql: + runs-on: ubuntu-latest + env: + PLUGINS: apollo-server|apollo-server-express|apollo-server-fastify|apollo-server-core + steps: + - uses: actions/checkout@v2 + - uses: ./.github/actions/node/setup + - run: yarn install + - uses: ./.github/actions/node/oldest + - run: yarn test:appsec:plugins:ci + - uses: ./.github/actions/node/latest + - run: yarn test:appsec:plugins:ci + - uses: codecov/codecov-action@v2 + mongodb-core: runs-on: ubuntu-latest services: diff --git a/docs/test.ts b/docs/test.ts index 3c9342a0bba..865de552afa 100644 --- a/docs/test.ts +++ b/docs/test.ts @@ -108,6 +108,7 @@ tracer.init({ obfuscatorValueRegex: '.*', blockedTemplateHtml: './blocked.html', blockedTemplateJson: './blocked.json', + blockedTemplateGraphql: './blockedgraphql.json', eventTracking: { mode: 'safe' }, diff --git a/index.d.ts b/index.d.ts index f84328ea25c..9396d3bada8 100644 --- a/index.d.ts +++ b/index.d.ts @@ -567,6 +567,11 @@ export declare interface TracerOptions { */ blockedTemplateJson?: string, + /** + * Specifies a path to a custom blocking template json file for graphql requests + */ + blockedTemplateGraphql?: string, + /** * Controls the automated user event tracking configuration */ diff --git a/integration-tests/graphql.spec.js b/integration-tests/graphql.spec.js index 86e846ab460..4a7c8e14917 100644 --- a/integration-tests/graphql.spec.js +++ b/integration-tests/graphql.spec.js @@ -79,7 +79,6 @@ describe('graphql', () => { { id: 'test-rule-id-1', name: 'test-rule-name-1', - on_match: ['block'], tags: { category: 'attack_attempt', @@ -92,8 +91,8 @@ describe('graphql', () => { operator_value: '', parameters: [ { - address: 'graphql.server.all_resolvers', - key_path: ['images', '0', 'category'], + address: 'graphql.server.resolver', + key_path: ['images', 'category'], value: 'testattack', highlight: ['testattack'] } diff --git a/integration-tests/graphql/graphql-rules.json b/integration-tests/graphql/graphql-rules.json index e258dda5226..1073c3d05a2 100644 --- a/integration-tests/graphql/graphql-rules.json +++ b/integration-tests/graphql/graphql-rules.json @@ -17,6 +17,9 @@ "inputs": [ { "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" } ], "list": [ @@ -27,7 +30,7 @@ } ], "transformers": ["lowercase"], - "on_match": ["block"] + "on_match": [] } ] } diff --git a/packages/datadog-instrumentations/src/apollo-server-core.js b/packages/datadog-instrumentations/src/apollo-server-core.js new file mode 100644 index 00000000000..52db88973d7 --- /dev/null +++ b/packages/datadog-instrumentations/src/apollo-server-core.js @@ -0,0 +1,41 @@ +'use strict' + +const { AbortController } = require('node-abort-controller') +const { addHook } = require('./helpers/instrument') +const shimmer = require('../../datadog-shimmer') +const dc = require('dc-polyfill') + +const requestChannel = dc.tracingChannel('datadog:apollo-server-core:request') + +addHook({ name: 'apollo-server-core', file: 'dist/runHttpQuery.js', versions: ['>3.0.0'] }, runHttpQueryModule => { + const HttpQueryError = runHttpQueryModule.HttpQueryError + + shimmer.wrap(runHttpQueryModule, 'runHttpQuery', function wrapRunHttpQuery (originalRunHttpQuery) { + return async function runHttpQuery () { + if (!requestChannel.start.hasSubscribers) { + return originalRunHttpQuery.apply(this, arguments) + } + + const abortController = new AbortController() + const abortData = {} + + 
const runHttpQueryResult = requestChannel.tracePromise( + originalRunHttpQuery, + { abortController, abortData }, + this, + ...arguments) + + const abortPromise = new Promise((resolve, reject) => { + abortController.signal.addEventListener('abort', (event) => { + // runHttpQuery callbacks are writing the response on resolve/reject. + // We should return blocking data in the apollo-server-core HttpQueryError object + reject(new HttpQueryError(abortData.statusCode, abortData.message, true, abortData.headers)) + }, { once: true }) + }) + + return Promise.race([runHttpQueryResult, abortPromise]) + } + }) + + return runHttpQueryModule +}) diff --git a/packages/datadog-instrumentations/src/apollo-server.js b/packages/datadog-instrumentations/src/apollo-server.js new file mode 100644 index 00000000000..c0f4b2bbf69 --- /dev/null +++ b/packages/datadog-instrumentations/src/apollo-server.js @@ -0,0 +1,83 @@ +'use strict' + +const { AbortController } = require('node-abort-controller') +const dc = require('dc-polyfill') + +const { addHook } = require('./helpers/instrument') +const shimmer = require('../../datadog-shimmer') + +const graphqlMiddlewareChannel = dc.tracingChannel('datadog:apollo:middleware') + +const requestChannel = dc.tracingChannel('datadog:apollo:request') + +let HeaderMap + +function wrapExecuteHTTPGraphQLRequest (originalExecuteHTTPGraphQLRequest) { + return async function executeHTTPGraphQLRequest () { + if (!HeaderMap || !requestChannel.start.hasSubscribers) { + return originalExecuteHTTPGraphQLRequest.apply(this, arguments) + } + + const abortController = new AbortController() + const abortData = {} + + const graphqlResponseData = requestChannel.tracePromise( + originalExecuteHTTPGraphQLRequest, + { abortController, abortData }, + this, + ...arguments) + + const abortPromise = new Promise((resolve, reject) => { + abortController.signal.addEventListener('abort', (event) => { + // This method is expected to return response data + // with headers, status and body + const headers = new HeaderMap() + Object.keys(abortData.headers).forEach(key => { + headers.set(key, abortData.headers[key]) + }) + + resolve({ + headers: headers, + status: abortData.statusCode, + body: { + kind: 'complete', + string: abortData.message + } + }) + }, { once: true }) + }) + + return Promise.race([abortPromise, graphqlResponseData]) + } +} + +function apolloExpress4Hook (express4) { + shimmer.wrap(express4, 'expressMiddleware', function wrapExpressMiddleware (originalExpressMiddleware) { + return function expressMiddleware (server, options) { + const originalMiddleware = originalExpressMiddleware.apply(this, arguments) + + return shimmer.wrap(originalMiddleware, function (req, res, next) { + if (!graphqlMiddlewareChannel.start.hasSubscribers) { + return originalMiddleware.apply(this, arguments) + } + + return graphqlMiddlewareChannel.traceSync(originalMiddleware, { req }, this, ...arguments) + }) + } + }) + return express4 +} + +function apolloHeaderMapHook (headerMap) { + HeaderMap = headerMap.HeaderMap + return headerMap +} + +function apolloServerHook (apolloServer) { + shimmer.wrap(apolloServer.ApolloServer.prototype, 'executeHTTPGraphQLRequest', wrapExecuteHTTPGraphQLRequest) + return apolloServer +} + +addHook({ name: '@apollo/server', file: 'dist/cjs/ApolloServer.js', versions: ['>=4.0.0'] }, apolloServerHook) +addHook({ name: '@apollo/server', file: 'dist/cjs/express4/index.js', versions: ['>=4.0.0'] }, apolloExpress4Hook) +addHook({ name: '@apollo/server', file: 'dist/cjs/utils/HeaderMap.js', 
versions: ['>=4.0.0'] }, apolloHeaderMapHook) diff --git a/packages/datadog-instrumentations/src/graphql.js b/packages/datadog-instrumentations/src/graphql.js index 6d4a18f17cb..11e917a30a9 100644 --- a/packages/datadog-instrumentations/src/graphql.js +++ b/packages/datadog-instrumentations/src/graphql.js @@ -1,5 +1,7 @@ 'use strict' +const { AbortController } = require('node-abort-controller') + const { addHook, channel, @@ -37,6 +39,13 @@ const validateStartCh = channel('apm:graphql:validate:start') const validateFinishCh = channel('apm:graphql:validate:finish') const validateErrorCh = channel('apm:graphql:validate:error') +class AbortError extends Error { + constructor (message) { + super(message) + this.name = 'AbortError' + } +} + function getOperation (document, operationName) { if (!document || !Array.isArray(document.definitions)) { return @@ -175,11 +184,11 @@ function wrapExecute (execute) { docSource: documentSources.get(document) }) - const context = { source, asyncResource, fields: {} } + const context = { source, asyncResource, fields: {}, abortController: new AbortController() } contexts.set(contextValue, context) - return callInAsyncScope(exe, asyncResource, this, arguments, (err, res) => { + return callInAsyncScope(exe, asyncResource, this, arguments, context.abortController, (err, res) => { if (finishResolveCh.hasSubscribers) finishResolvers(context) const error = err || (res && res.errors && res.errors[0]) @@ -207,7 +216,7 @@ function wrapResolve (resolve) { const field = assertField(context, info, args) - return callInAsyncScope(resolve, field.asyncResource, this, arguments, (err) => { + return callInAsyncScope(resolve, field.asyncResource, this, arguments, context.abortController, (err) => { updateFieldCh.publish({ field, info, err }) }) } @@ -217,10 +226,15 @@ function wrapResolve (resolve) { return resolveAsync } -function callInAsyncScope (fn, aR, thisArg, args, cb) { +function callInAsyncScope (fn, aR, thisArg, args, abortController, cb) { cb = cb || (() => {}) return aR.runInAsyncScope(() => { + if (abortController?.signal.aborted) { + cb(null, null) + throw new AbortError('Aborted') + } + try { const result = fn.apply(thisArg, args) if (result && typeof result.then === 'function') { diff --git a/packages/datadog-instrumentations/src/helpers/hooks.js b/packages/datadog-instrumentations/src/helpers/hooks.js index bd409dcaa01..ad572e41090 100644 --- a/packages/datadog-instrumentations/src/helpers/hooks.js +++ b/packages/datadog-instrumentations/src/helpers/hooks.js @@ -1,6 +1,8 @@ 'use strict' module.exports = { + '@apollo/server': () => require('../apollo-server'), + 'apollo-server-core': () => require('../apollo-server-core'), '@aws-sdk/smithy-client': () => require('../aws-sdk'), '@cucumber/cucumber': () => require('../cucumber'), '@playwright/test': () => require('../playwright'), diff --git a/packages/datadog-plugin-graphql/src/resolve.js b/packages/datadog-plugin-graphql/src/resolve.js index caca7c96e3e..84c2377fca1 100644 --- a/packages/datadog-plugin-graphql/src/resolve.js +++ b/packages/datadog-plugin-graphql/src/resolve.js @@ -1,6 +1,7 @@ 'use strict' const TracingPlugin = require('../../dd-trace/src/plugins/tracing') +const dc = require('dc-polyfill') const collapsedPathSym = Symbol('collapsedPaths') @@ -14,8 +15,6 @@ class GraphQLResolvePlugin extends TracingPlugin { if (!shouldInstrument(this.config, path)) return const computedPathString = path.join('.') - addResolver(context, info, args) - if (this.config.collapse) { if (!context[collapsedPathSym]) { 
context[collapsedPathSym] = {} @@ -55,6 +54,10 @@ class GraphQLResolvePlugin extends TracingPlugin { span.setTag(`graphql.variables.${name}`, variables[name]) }) } + + if (this.resolverStartCh.hasSubscribers) { + this.resolverStartCh.publish({ context, resolverInfo: getResolverInfo(info, args) }) + } } constructor (...args) { @@ -69,6 +72,8 @@ class GraphQLResolvePlugin extends TracingPlugin { field.finishTime = span._getTime ? span._getTime() : 0 field.error = field.error || err }) + + this.resolverStartCh = dc.channel('datadog:graphql:resolver:start') } configure (config) { @@ -109,28 +114,31 @@ function withCollapse (responsePathAsArray) { } } -function addResolver (context, info, args) { - if (info.rootValue && !info.rootValue[info.fieldName]) { - return - } +function getResolverInfo (info, args) { + let resolverInfo = null + const resolverVars = {} - if (!context.resolvers) { - context.resolvers = {} + if (args && Object.keys(args).length) { + Object.assign(resolverVars, args) } - const resolvers = context.resolvers - - if (!resolvers[info.fieldName]) { - if (args && Object.keys(args).length) { - resolvers[info.fieldName] = [args] - } else { - resolvers[info.fieldName] = [] + const directives = info.fieldNodes[0].directives + for (const directive of directives) { + const argList = {} + for (const argument of directive['arguments']) { + argList[argument.name.value] = argument.value.value } - } else { - if (args && Object.keys(args).length) { - resolvers[info.fieldName].push(args) + + if (Object.keys(argList).length) { + resolverVars[directive.name.value] = argList } } + + if (Object.keys(resolverVars).length) { + resolverInfo = { [info.fieldName]: resolverVars } + } + + return resolverInfo } module.exports = GraphQLResolvePlugin diff --git a/packages/dd-trace/src/appsec/addresses.js b/packages/dd-trace/src/appsec/addresses.js index a4d47243a67..343a4b2cbba 100644 --- a/packages/dd-trace/src/appsec/addresses.js +++ b/packages/dd-trace/src/appsec/addresses.js @@ -13,6 +13,7 @@ module.exports = { HTTP_INCOMING_RESPONSE_HEADERS: 'server.response.headers.no_cookies', // TODO: 'server.response.trailers', HTTP_INCOMING_GRAPHQL_RESOLVERS: 'graphql.server.all_resolvers', + HTTP_INCOMING_GRAPHQL_RESOLVER: 'graphql.server.resolver', HTTP_CLIENT_IP: 'http.client_ip', diff --git a/packages/dd-trace/src/appsec/blocked_templates.js b/packages/dd-trace/src/appsec/blocked_templates.js index 7dcd1ffe519..1eb62e22df0 100644 --- a/packages/dd-trace/src/appsec/blocked_templates.js +++ b/packages/dd-trace/src/appsec/blocked_templates.js @@ -5,7 +5,10 @@ const html = `= 400) { statusCode = 303 } - - res.writeHead(statusCode, { + const headers = { 'Location': blockingConfiguration.parameters.location - }).end() + } + + rootSpan.addTags({ + 'appsec.blocked': 'true' + }) - if (abortController) { - abortController.abort() + return { headers, statusCode } +} + +function getSpecificBlockingData (type) { + switch (type) { + case specificBlockingTypes.GRAPHQL: + return { + type: 'application/json', + body: templateGraphqlJson + } } } -function blockWithContent (req, res, rootSpan, abortController) { +function getBlockWithContentData (req, specificType, rootSpan) { let type let body + let statusCode - // parse the Accept header, ex: Accept: text/html, application/xhtml+xml, application/xml;q=0.9, */*;q=0.8 - const accept = req.headers.accept && req.headers.accept.split(',').map((str) => str.split(';', 1)[0].trim()) + const specificBlockingType = specificType || detectedSpecificEndpoints[getSpecificKey(req.method, 
req.url)] + if (specificBlockingType) { + const specificBlockingContent = getSpecificBlockingData(specificBlockingType) + type = specificBlockingContent?.type + body = specificBlockingContent?.body + } - if (!blockingConfiguration || blockingConfiguration.parameters.type === 'auto') { - if (accept && accept.includes('text/html') && !accept.includes('application/json')) { - type = 'text/html; charset=utf-8' - body = templateHtml + if (!type) { + // parse the Accept header, ex: Accept: text/html, application/xhtml+xml, application/xml;q=0.9, */*;q=0.8 + const accept = req.headers.accept?.split(',').map((str) => str.split(';', 1)[0].trim()) + + if (!blockingConfiguration || blockingConfiguration.parameters.type === 'auto') { + if (accept?.includes('text/html') && !accept.includes('application/json')) { + type = 'text/html; charset=utf-8' + body = templateHtml + } else { + type = 'application/json' + body = templateJson + } } else { - type = 'application/json' - body = templateJson + if (blockingConfiguration.parameters.type === 'html') { + type = 'text/html; charset=utf-8' + body = templateHtml + } else { + type = 'application/json' + body = templateJson + } } + } + + if (blockingConfiguration?.type === 'block_request' && blockingConfiguration.parameters.status_code) { + statusCode = blockingConfiguration.parameters.status_code } else { - if (blockingConfiguration.parameters.type === 'html') { - type = 'text/html; charset=utf-8' - body = templateHtml - } else { - type = 'application/json' - body = templateJson - } + statusCode = 403 + } + + const headers = { + 'Content-Type': type, + 'Content-Length': Buffer.byteLength(body) } rootSpan.addTags({ 'appsec.blocked': 'true' }) - if (blockingConfiguration && blockingConfiguration.type === 'block_request' && - blockingConfiguration.parameters.status_code) { - res.statusCode = blockingConfiguration.parameters.status_code - } else { - res.statusCode = 403 - } - res.setHeader('Content-Type', type) - res.setHeader('Content-Length', Buffer.byteLength(body)) - res.end(body) + return { body, statusCode, headers } +} - if (abortController) { - abortController.abort() +function getBlockingData (req, specificType, rootSpan) { + if (blockingConfiguration?.type === 'redirect_request' && blockingConfiguration.parameters.location) { + return getBlockWithRedirectData(rootSpan) + } else { + return getBlockWithContentData(req, specificType, rootSpan) } } -function block (req, res, rootSpan, abortController) { +function block (req, res, rootSpan, abortController, type) { if (res.headersSent) { log.warn('Cannot send blocking response when headers have already been sent') return } - if (blockingConfiguration && blockingConfiguration.type === 'redirect_request' && - blockingConfiguration.parameters.location) { - blockWithRedirect(res, rootSpan, abortController) - } else { - blockWithContent(req, res, rootSpan, abortController) - } + const { body, headers, statusCode } = getBlockingData(req, type, rootSpan) + + res.writeHead(statusCode, headers).end(body) + + abortController?.abort() } function setTemplates (config) { if (config.appsec.blockedTemplateHtml) { templateHtml = config.appsec.blockedTemplateHtml + } else { + templateHtml = blockedTemplates.html } + if (config.appsec.blockedTemplateJson) { templateJson = config.appsec.blockedTemplateJson + } else { + templateJson = blockedTemplates.json + } + + if (config.appsec.blockedTemplateGraphql) { + templateGraphqlJson = config.appsec.blockedTemplateGraphql + } else { + templateGraphqlJson = blockedTemplates.graphqlJson 
} } @@ -98,7 +147,10 @@ function updateBlockingConfiguration (newBlockingConfiguration) { } module.exports = { + addSpecificEndpoint, block, + specificBlockingTypes, + getBlockingData, setTemplates, updateBlockingConfiguration } diff --git a/packages/dd-trace/src/appsec/channels.js b/packages/dd-trace/src/appsec/channels.js index cf31b12d233..f5832f2986c 100644 --- a/packages/dd-trace/src/appsec/channels.js +++ b/packages/dd-trace/src/appsec/channels.js @@ -6,7 +6,10 @@ const dc = require('dc-polyfill') module.exports = { bodyParser: dc.channel('datadog:body-parser:read:finish'), cookieParser: dc.channel('datadog:cookie-parser:read:finish'), - graphqlFinishExecute: dc.channel('apm:graphql:execute:finish'), + startGraphqlResolve: dc.channel('datadog:graphql:resolver:start'), + graphqlMiddlewareChannel: dc.tracingChannel('datadog:apollo:middleware'), + apolloChannel: dc.tracingChannel('datadog:apollo:request'), + apolloServerCoreChannel: dc.tracingChannel('datadog:apollo-server-core:request'), incomingHttpRequestStart: dc.channel('dd-trace:incomingHttpRequestStart'), incomingHttpRequestEnd: dc.channel('dd-trace:incomingHttpRequestEnd'), passportVerify: dc.channel('datadog:passport:verify:finish'), diff --git a/packages/dd-trace/src/appsec/graphql.js b/packages/dd-trace/src/appsec/graphql.js new file mode 100644 index 00000000000..cf819e4382f --- /dev/null +++ b/packages/dd-trace/src/appsec/graphql.js @@ -0,0 +1,146 @@ +'use strict' + +const { storage } = require('../../../datadog-core') +const { addSpecificEndpoint, specificBlockingTypes, getBlockingData } = require('./blocking') +const waf = require('./waf') +const addresses = require('./addresses') +const web = require('../plugins/util/web') +const { + startGraphqlResolve, + graphqlMiddlewareChannel, + apolloChannel, + apolloServerCoreChannel +} = require('./channels') + +const graphqlRequestData = new WeakMap() + +function enable () { + enableApollo() + enableGraphql() +} + +function disable () { + disableApollo() + disableGraphql() +} + +function onGraphqlStartResolve ({ context, resolverInfo }) { + const req = storage.getStore()?.req + + if (!req) return + + if (!resolverInfo || typeof resolverInfo !== 'object') return + + const actions = waf.run({ [addresses.HTTP_INCOMING_GRAPHQL_RESOLVER]: resolverInfo }, req) + if (actions?.includes('block')) { + const requestData = graphqlRequestData.get(req) + if (requestData?.isInGraphqlRequest) { + requestData.blocked = true + context?.abortController?.abort() + } + } +} + +function enterInApolloMiddleware (data) { + const req = data?.req || storage.getStore()?.req + if (!req) return + + graphqlRequestData.set(req, { + inApolloMiddleware: true, + blocked: false + }) +} + +function enterInApolloServerCoreRequest () { + const req = storage.getStore()?.req + if (!req) return + + graphqlRequestData.set(req, { + isInGraphqlRequest: true, + blocked: false + }) +} + +function exitFromApolloMiddleware (data) { + const req = data?.req || storage.getStore()?.req + const requestData = graphqlRequestData.get(req) + if (requestData) requestData.inApolloMiddleware = false +} + +function enterInApolloRequest () { + const req = storage.getStore()?.req + + const requestData = graphqlRequestData.get(req) + if (requestData?.inApolloMiddleware) { + requestData.isInGraphqlRequest = true + addSpecificEndpoint(req.method, req.originalUrl || req.url, specificBlockingTypes.GRAPHQL) + } +} + +function beforeWriteApolloGraphqlResponse ({ abortController, abortData }) { + const req = storage.getStore()?.req + if (!req) 
return + + const requestData = graphqlRequestData.get(req) + + if (requestData?.blocked) { + const rootSpan = web.root(req) + if (!rootSpan) return + + const blockingData = getBlockingData(req, specificBlockingTypes.GRAPHQL, rootSpan) + abortData.statusCode = blockingData.statusCode + abortData.headers = blockingData.headers + abortData.message = blockingData.body + + abortController?.abort() + } + + graphqlRequestData.delete(req) +} + +function enableApollo () { + graphqlMiddlewareChannel.subscribe({ + start: enterInApolloMiddleware, + end: exitFromApolloMiddleware + }) + + apolloServerCoreChannel.subscribe({ + start: enterInApolloServerCoreRequest, + asyncEnd: beforeWriteApolloGraphqlResponse + }) + + apolloChannel.subscribe({ + start: enterInApolloRequest, + asyncEnd: beforeWriteApolloGraphqlResponse + }) +} + +function disableApollo () { + graphqlMiddlewareChannel.unsubscribe({ + start: enterInApolloMiddleware, + end: exitFromApolloMiddleware + }) + + apolloServerCoreChannel.unsubscribe({ + start: enterInApolloServerCoreRequest, + asyncEnd: beforeWriteApolloGraphqlResponse + }) + + apolloChannel.unsubscribe({ + start: enterInApolloRequest, + asyncEnd: beforeWriteApolloGraphqlResponse + }) +} + +function enableGraphql () { + startGraphqlResolve.subscribe(onGraphqlStartResolve) +} + +function disableGraphql () { + if (startGraphqlResolve.hasSubscribers) startGraphqlResolve.unsubscribe(onGraphqlStartResolve) +} + +module.exports = { + enable, + disable +} diff --git a/packages/dd-trace/src/appsec/index.js b/packages/dd-trace/src/appsec/index.js index 386918636cc..f072cc044b0 100644 --- a/packages/dd-trace/src/appsec/index.js +++ b/packages/dd-trace/src/appsec/index.js @@ -6,7 +6,6 @@ const remoteConfig = require('./remote_config') const { bodyParser, cookieParser, - graphqlFinishExecute, incomingHttpRequestStart, incomingHttpRequestEnd, passportVerify, @@ -24,6 +23,7 @@ const { HTTP_CLIENT_IP } = require('../../../../ext/tags') const { block, setTemplates } = require('./blocking') const { passportTrackEvent } = require('./passport') const { storage } = require('../../../datadog-core') +const graphql = require('./graphql') let isEnabled = false let config @@ -41,6 +41,7 @@ function enable (_config) { try { appsecTelemetry.enable(_config.telemetry) + graphql.enable() setTemplates(_config) @@ -57,7 +58,6 @@ function enable (_config) { nextQueryParsed.subscribe(onRequestQueryParsed) queryParser.subscribe(onRequestQueryParsed) cookieParser.subscribe(onRequestCookieParser) - graphqlFinishExecute.subscribe(onGraphqlFinishExecute) if (_config.appsec.eventTracking.enabled) { passportVerify.subscribe(onPassportVerify) @@ -205,20 +205,6 @@ function onPassportVerify ({ credentials, user }) { passportTrackEvent(credentials, user, rootSpan, config.appsec.eventTracking.mode) } -function onGraphqlFinishExecute ({ context }) { - const store = storage.getStore() - const req = store?.req - - if (!req) return - - const resolvers = context?.resolvers - - if (!resolvers || typeof resolvers !== 'object') return - - // Don't collect blocking result because it only works in monitor mode. 
- waf.run({ [addresses.HTTP_INCOMING_GRAPHQL_RESOLVERS]: resolvers }, req) -} - function handleResults (actions, req, res, rootSpan, abortController) { if (!actions || !req || !res || !rootSpan || !abortController) return @@ -234,12 +220,12 @@ function disable () { RuleManager.clearAllRules() appsecTelemetry.disable() + graphql.disable() remoteConfig.disableWafUpdate() // Channel#unsubscribe() is undefined for non active channels if (bodyParser.hasSubscribers) bodyParser.unsubscribe(onRequestBodyParsed) - if (graphqlFinishExecute.hasSubscribers) graphqlFinishExecute.unsubscribe(onGraphqlFinishExecute) if (incomingHttpRequestStart.hasSubscribers) incomingHttpRequestStart.unsubscribe(incomingHttpStartTranslator) if (incomingHttpRequestEnd.hasSubscribers) incomingHttpRequestEnd.unsubscribe(incomingHttpEndTranslator) if (queryParser.hasSubscribers) queryParser.unsubscribe(onRequestQueryParsed) diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index bc03636f6ef..fb5615bdf17 100644 --- a/packages/dd-trace/src/config.js +++ b/packages/dd-trace/src/config.js @@ -435,6 +435,10 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) maybeFile(appsec.blockedTemplateJson), maybeFile(process.env.DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON) ) + const DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON = coalesce( + maybeFile(appsec.blockedTemplateGraphql), + maybeFile(process.env.DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON) + ) const DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING = coalesce( appsec.eventTracking && appsec.eventTracking.mode, process.env.DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING, @@ -644,6 +648,7 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) obfuscatorValueRegex: DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP, blockedTemplateHtml: DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML, blockedTemplateJson: DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON, + blockedTemplateGraphql: DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON, eventTracking: { enabled: ['extended', 'safe'].includes(DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING), mode: DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING diff --git a/packages/dd-trace/test/appsec/blocking.spec.js b/packages/dd-trace/test/appsec/blocking.spec.js index ec6c87757a7..0096a1938d4 100644 --- a/packages/dd-trace/test/appsec/blocking.spec.js +++ b/packages/dd-trace/test/appsec/blocking.spec.js @@ -70,9 +70,10 @@ describe('blocking', () => { block(req, res, rootSpan) expect(rootSpan.addTags).to.have.been.calledOnceWithExactly({ 'appsec.blocked': 'true' }) - expect(res.setHeader).to.have.been.calledTwice - expect(res.setHeader.firstCall).to.have.been.calledWithExactly('Content-Type', 'text/html; charset=utf-8') - expect(res.setHeader.secondCall).to.have.been.calledWithExactly('Content-Length', 12) + expect(res.writeHead).to.have.been.calledOnceWithExactly(403, { + 'Content-Type': 'text/html; charset=utf-8', + 'Content-Length': 12 + }) expect(res.end).to.have.been.calledOnceWithExactly('htmlBodyéé') }) @@ -81,9 +82,10 @@ describe('blocking', () => { block(req, res, rootSpan) expect(rootSpan.addTags).to.have.been.calledOnceWithExactly({ 'appsec.blocked': 'true' }) - expect(res.setHeader).to.have.been.calledTwice - expect(res.setHeader.firstCall).to.have.been.calledWithExactly('Content-Type', 'application/json') - expect(res.setHeader.secondCall).to.have.been.calledWithExactly('Content-Length', 8) + expect(res.writeHead).to.have.been.calledOnceWithExactly(403, { + 'Content-Type': 'application/json', + 'Content-Length': 8 + }) 
expect(res.end).to.have.been.calledOnceWithExactly('jsonBody') }) @@ -91,9 +93,10 @@ describe('blocking', () => { block(req, res, rootSpan) expect(rootSpan.addTags).to.have.been.calledOnceWithExactly({ 'appsec.blocked': 'true' }) - expect(res.setHeader).to.have.been.calledTwice - expect(res.setHeader.firstCall).to.have.been.calledWithExactly('Content-Type', 'application/json') - expect(res.setHeader.secondCall).to.have.been.calledWithExactly('Content-Length', 8) + expect(res.writeHead).to.have.been.calledOnceWithExactly(403, { + 'Content-Type': 'application/json', + 'Content-Length': 8 + }) expect(res.end).to.have.been.calledOnceWithExactly('jsonBody') }) @@ -102,9 +105,10 @@ describe('blocking', () => { block(req, res, rootSpan, abortController) expect(rootSpan.addTags).to.have.been.calledOnceWithExactly({ 'appsec.blocked': 'true' }) - expect(res.setHeader).to.have.been.calledTwice - expect(res.setHeader.firstCall).to.have.been.calledWithExactly('Content-Type', 'application/json') - expect(res.setHeader.secondCall).to.have.been.calledWithExactly('Content-Length', 8) + expect(res.writeHead).to.have.been.calledOnceWithExactly(403, { + 'Content-Type': 'application/json', + 'Content-Length': 8 + }) expect(res.end).to.have.been.calledOnceWithExactly('jsonBody') expect(abortController.signal.aborted).to.be.true }) @@ -158,8 +162,8 @@ describe('blocking', () => { block(req, res, rootSpan) + expect(res.writeHead).to.have.been.calledOnceWith(401) expect(res.end).to.have.been.calledOnceWithExactly(defaultBlockedTemplate.html) - expect(res.statusCode).to.be.equal(401) }) it('should block with default json template and custom status ' + @@ -177,8 +181,8 @@ describe('blocking', () => { block(req, res, rootSpan) + expect(res.writeHead).to.have.been.calledOnceWith(401) expect(res.end).to.have.been.calledOnceWithExactly(defaultBlockedTemplate.json) - expect(res.statusCode).to.be.equal(401) }) it('should block with default html template and custom status ' + @@ -196,8 +200,8 @@ describe('blocking', () => { block(req, res, rootSpan) + expect(res.writeHead).to.have.been.calledOnceWith(401) expect(res.end).to.have.been.calledOnceWithExactly(defaultBlockedTemplate.html) - expect(res.statusCode).to.be.equal(401) }) it('should block with default json template and custom status', () => { @@ -213,8 +217,8 @@ describe('blocking', () => { block(req, res, rootSpan) + expect(res.writeHead).to.have.been.calledOnceWith(401) expect(res.end).to.have.been.calledOnceWithExactly(defaultBlockedTemplate.json) - expect(res.statusCode).to.be.equal(401) }) it('should block with default json template and custom status ' + @@ -231,8 +235,8 @@ describe('blocking', () => { block(req, res, rootSpan) + expect(res.writeHead).to.have.been.calledOnceWith(401) expect(res.end).to.have.been.calledOnceWithExactly(defaultBlockedTemplate.json) - expect(res.statusCode).to.be.equal(401) }) it('should block with default html template and custom status ' + @@ -249,8 +253,8 @@ describe('blocking', () => { block(req, res, rootSpan) + expect(res.writeHead).to.have.been.calledOnceWith(401) expect(res.end).to.have.been.calledOnceWithExactly(defaultBlockedTemplate.html) - expect(res.statusCode).to.be.equal(401) }) it('should block with custom redirect', () => { diff --git a/packages/dd-trace/test/appsec/graphq.test-utils.js b/packages/dd-trace/test/appsec/graphq.test-utils.js new file mode 100644 index 00000000000..2b07bb19865 --- /dev/null +++ b/packages/dd-trace/test/appsec/graphq.test-utils.js @@ -0,0 +1,228 @@ +'use strict' + +const axios = 
require('axios') +const path = require('path') +const fs = require('fs') +const { graphqlJson, json } = require('../../src/appsec/blocked_templates') +const agent = require('../plugins/agent') +const appsec = require('../../src/appsec') +const Config = require('../../src/config') + +const schema = ` +directive @case(format: String) on FIELD + +type Book { + title: String, + author: String +} + +type Query { + books(title: String): [Book!]! +}` + +const query = ` +query GetBooks ($title: String) { + books(title: $title) { + title, + author + } +}` + +function makeQuery (derivativeParam) { + return ` + query GetBooks ($title: String) { + books(title: $title) @case(format: "${derivativeParam}") { + title + author + } + }` +} + +const books = [ + { + title: 'Test title', + author: 'Test author' + } +] + +const resolvers = { + Query: { + books: (root, args, context) => { + return books.filter(book => { + return book.title.includes(args.title) + }) + } + } +} + +async function makeGraphqlRequest (port, variables, derivativeParam, extraHeaders = {}) { + const headers = { + 'content-type': 'application/json', + ...extraHeaders + } + + const query = makeQuery(derivativeParam) + return axios.post(`http://localhost:${port}/graphql`, { + operationName: 'GetBooks', + query, + variables + }, { headers, maxRedirects: 0 }) +} + +function graphqlCommonTests (config) { + describe('Block with content', () => { + beforeEach(() => { + appsec.enable(new Config({ appsec: { enabled: true, rules: path.join(__dirname, 'graphql-rules.json') } })) + }) + + afterEach(() => { + appsec.disable() + }) + + it('Should block an attack on variable', async () => { + try { + await makeGraphqlRequest(config.port, { title: 'testattack' }, 'lower') + + return Promise.reject(new Error('Request should not return 200')) + } catch (e) { + expect(e.response.status).to.be.equals(403) + expect(e.response.data).to.be.deep.equal(JSON.parse(graphqlJson)) + } + }) + + it('Should block an attack on directive', async () => { + try { + await makeGraphqlRequest(config.port, { title: 'Test' }, 'testattack') + + return Promise.reject(new Error('Request should not return 200')) + } catch (e) { + expect(e.response.status).to.be.equals(403) + expect(e.response.data).to.be.deep.equal(JSON.parse(graphqlJson)) + } + }) + + it('Should set appsec.blocked on blocked attack', (done) => { + agent.use(payload => { + expect(payload[0][0].meta['appsec.blocked']).to.be.equal('true') + done() + }) + + makeGraphqlRequest(config.port, { title: 'testattack' }, 'lower').then(() => { + done(new Error('block expected')) + }) + }) + + it('Should not block a safe request', async () => { + const response = await makeGraphqlRequest(config.port, { title: 'Test' }, 'lower') + + expect(response.data).to.be.deep.equal({ data: { books } }) + }) + + it('Should block an http attack with graphql response', async () => { + await makeGraphqlRequest(config.port, { title: 'Test' }, 'lower') + + try { + await makeGraphqlRequest(config.port, { title: 'testattack' }, 'lower', { customHeader: 'lower' }) + + return Promise.reject(new Error('Request should not return 200')) + } catch (e) { + expect(e.response.status).to.be.equals(403) + expect(e.response.data).to.be.deep.equal(JSON.parse(graphqlJson)) + } + }) + + it('Should block an http attack with json response when it is not a graphql endpoint', async () => { + await makeGraphqlRequest(config.port, { title: 'Test' }, 'lower') + + try { + await axios.get(`http://localhost:${config.port}/hello`, { headers: { customHeader: 'testattack' } 
}) + + return Promise.reject(new Error('Request should not return 200')) + } catch (e) { + expect(e.response.status).to.be.equals(403) + expect(e.response.data).to.be.deep.equal(JSON.parse(json)) + } + }) + }) + + describe('Block with custom content', () => { + const blockedTemplateGraphql = path.join(__dirname, 'graphql.block.json') + const customGraphqlJson = fs.readFileSync(blockedTemplateGraphql) + + beforeEach(() => { + appsec.enable(new Config({ + appsec: { + enabled: true, + rules: path.join(__dirname, 'graphql-rules.json'), + blockedTemplateGraphql + } + })) + }) + + afterEach(() => { + appsec.disable() + }) + + it('Should block an attack on variable', async () => { + try { + await makeGraphqlRequest(config.port, { title: 'testattack' }, 'lower') + + return Promise.reject(new Error('Request should not return 200')) + } catch (e) { + expect(e.response.status).to.be.equals(403) + expect(e.response.data).to.be.deep.equal(JSON.parse(customGraphqlJson)) + } + }) + }) + + describe('Block with redirect', () => { + beforeEach(() => { + appsec.enable(new Config({ + appsec: { + enabled: true, + rules: path.join(__dirname, 'graphql-rules-redirect.json') + } + })) + }) + + afterEach(() => { + appsec.disable() + }) + + it('Should block an attack', async () => { + try { + await makeGraphqlRequest(config.port, { title: 'testattack' }, 'lower') + + return Promise.reject(new Error('Request should not return 200')) + } catch (e) { + expect(e.response.status).to.be.equal(301) + expect(e.response.headers.location).to.be.equal('/you-have-been-blocked') + } + }) + + it('Should set appsec.blocked on blocked attack', (done) => { + agent.use(payload => { + expect(payload[0][0].meta['appsec.blocked']).to.be.equal('true') + done() + }) + + makeGraphqlRequest(config.port, { title: 'testattack' }, 'lower').then(() => { + done(new Error('block expected')) + }) + }) + + it('Should not block a safe request', async () => { + const response = await makeGraphqlRequest(config.port, { title: 'Test' }, 'lower') + + expect(response.data).to.be.deep.equal({ data: { books } }) + }) + }) +} + +module.exports = { + books, + schema, + query, + resolvers, + graphqlCommonTests +} diff --git a/packages/dd-trace/test/appsec/graphql-rules-redirect.json b/packages/dd-trace/test/appsec/graphql-rules-redirect.json new file mode 100644 index 00000000000..d6da1ed39b8 --- /dev/null +++ b/packages/dd-trace/test/appsec/graphql-rules-redirect.json @@ -0,0 +1,49 @@ +{ + "version": "2.2", + "metadata": { + "rules_version": "1.5.0" + }, + "rules": [ + { + "id": "test-rule-id-1", + "name": "test-rule-name-1", + "tags": { + "type": "security_scanner", + "category": "attack_attempt" + }, + "conditions": [ + { + "parameters": { + "inputs": [ + { + "address": "graphql.server.all_resolvers" + }, + { + "address": "graphql.server.resolver" + }, + { + "address": "server.request.headers.no_cookies" + } + ], + "list": [ + "testattack" + ] + }, + "operator": "phrase_match" + } + ], + "transformers": ["lowercase"], + "on_match": ["block"] + } + ], + "actions": [ + { + "id": "block", + "type": "redirect_request", + "parameters": { + "status_code": 301, + "location": "/you-have-been-blocked" + } + } + ] +} diff --git a/packages/dd-trace/test/appsec/graphql-rules.json b/packages/dd-trace/test/appsec/graphql-rules.json index e258dda5226..f4cadcdf9bb 100644 --- a/packages/dd-trace/test/appsec/graphql-rules.json +++ b/packages/dd-trace/test/appsec/graphql-rules.json @@ -17,6 +17,12 @@ "inputs": [ { "address": "graphql.server.all_resolvers" + }, + { + 
"address": "graphql.server.resolver" + }, + { + "address": "server.request.headers.no_cookies" } ], "list": [ diff --git a/packages/dd-trace/test/appsec/graphql.apollo-server-express.plugin.spec.js b/packages/dd-trace/test/appsec/graphql.apollo-server-express.plugin.spec.js new file mode 100644 index 00000000000..a1cf874af04 --- /dev/null +++ b/packages/dd-trace/test/appsec/graphql.apollo-server-express.plugin.spec.js @@ -0,0 +1,60 @@ +'use strict' + +const getPort = require('get-port') +const agent = require('../plugins/agent') +const { + schema, + resolvers, + graphqlCommonTests +} = require('./graphq.test-utils') + +withVersions('apollo-server-core', 'express', '>=4', expressVersion => { + withVersions('apollo-server-core', 'apollo-server-express', apolloServerExpressVersion => { + const config = {} + let express, expressServer, ApolloServer, gql + let app, server + + before(() => { + return agent.load(['express', 'graphql', 'apollo-server-core', 'http'], { client: false }) + }) + + before(() => { + const apolloServerExpress = + require(`../../../../versions/apollo-server-express@${apolloServerExpressVersion}`).get() + ApolloServer = apolloServerExpress.ApolloServer + gql = apolloServerExpress.gql + + express = require(`../../../../versions/express@${expressVersion}`).get() + }) + + before(async () => { + app = express() + + const typeDefs = gql(schema) + + server = new ApolloServer({ + typeDefs, + resolvers + }) + + await server.start() + + server.applyMiddleware({ app }) + + config.port = await getPort() + + return new Promise(resolve => { + expressServer = app.listen({ port: config.port }, () => { + resolve() + }) + }) + }) + + after(async () => { + await server.stop() + expressServer.close() + }) + + graphqlCommonTests(config) + }) +}) diff --git a/packages/dd-trace/test/appsec/graphql.apollo-server-fastify.plugin.spec.js b/packages/dd-trace/test/appsec/graphql.apollo-server-fastify.plugin.spec.js new file mode 100644 index 00000000000..f6e423fb0e0 --- /dev/null +++ b/packages/dd-trace/test/appsec/graphql.apollo-server-fastify.plugin.spec.js @@ -0,0 +1,60 @@ +'use strict' + +const getPort = require('get-port') +const agent = require('../plugins/agent') +const { + schema, + resolvers, + graphqlCommonTests +} = require('./graphq.test-utils') + +withVersions('apollo-server-core', 'fastify', '3', fastifyVersion => { + withVersions('apollo-server-core', 'apollo-server-fastify', apolloServerFastifyVersion => { + const config = {} + let fastify, ApolloServer, gql + let app, server + + before(() => { + return agent.load(['fastify', 'graphql', 'apollo-server-core', 'http'], { client: false }) + }) + + before(() => { + const apolloServerFastify = + require(`../../../../versions/apollo-server-fastify@${apolloServerFastifyVersion}`).get() + ApolloServer = apolloServerFastify.ApolloServer + gql = apolloServerFastify.gql + + fastify = require(`../../../../versions/fastify@${fastifyVersion}`).get() + }) + + before(async () => { + app = fastify() + + const typeDefs = gql(schema) + + server = new ApolloServer({ + typeDefs, + resolvers + }) + + await server.start() + + app.register(server.createHandler()) + + config.port = await getPort() + + return new Promise(resolve => { + app.listen({ port: config.port }, (data) => { + resolve() + }) + }) + }) + + after(async () => { + await server.stop() + await app.close() + }) + + graphqlCommonTests(config) + }) +}) diff --git a/packages/dd-trace/test/appsec/graphql.apollo-server.plugin.spec.js 
b/packages/dd-trace/test/appsec/graphql.apollo-server.plugin.spec.js new file mode 100644 index 00000000000..f5b3863f963 --- /dev/null +++ b/packages/dd-trace/test/appsec/graphql.apollo-server.plugin.spec.js @@ -0,0 +1,44 @@ +'use strict' + +const getPort = require('get-port') +const path = require('path') +const agent = require('../plugins/agent') +const { + schema, + resolvers, + graphqlCommonTests +} = require('./graphq.test-utils') + +withVersions('apollo-server', '@apollo/server', apolloServerVersion => { + const config = {} + let ApolloServer, startStandaloneServer + let server + + before(() => { + return agent.load(['express', 'graphql', 'apollo-server', 'http'], { client: false }) + }) + + before(() => { + const apolloServerPath = require(`../../../../versions/@apollo/server@${apolloServerVersion}`).getPath() + + ApolloServer = require(apolloServerPath).ApolloServer + startStandaloneServer = require(path.join(apolloServerPath, '..', 'standalone')).startStandaloneServer + }) + + before(async () => { + server = new ApolloServer({ + typeDefs: schema, + resolvers + }) + + config.port = await getPort() + + await startStandaloneServer(server, { listen: { port: config.port } }) + }) + + after(async () => { + await server.stop() + }) + + graphqlCommonTests(config) +}) diff --git a/packages/dd-trace/test/appsec/graphql.block.json b/packages/dd-trace/test/appsec/graphql.block.json new file mode 100644 index 00000000000..46d71c957d1 --- /dev/null +++ b/packages/dd-trace/test/appsec/graphql.block.json @@ -0,0 +1,7 @@ +{ + "errors": [ + { + "message": "custom blocking message" + } + ] +} diff --git a/packages/dd-trace/test/appsec/graphql.spec.js b/packages/dd-trace/test/appsec/graphql.spec.js new file mode 100644 index 00000000000..d0a459d4729 --- /dev/null +++ b/packages/dd-trace/test/appsec/graphql.spec.js @@ -0,0 +1,237 @@ +const proxyquire = require('proxyquire') +const waf = require('../../src/appsec/waf') +const web = require('../../src/plugins/util/web') +const { storage } = require('../../../datadog-core') +const addresses = require('../../src/appsec/addresses') + +const { + startGraphqlResolve, + graphqlMiddlewareChannel, + apolloChannel, + apolloServerCoreChannel +} = require('../../src/appsec/channels') + +describe('GraphQL', () => { + let graphql + let blocking + + beforeEach(() => { + const getBlockingData = sinon.stub() + blocking = { + getBlockingData, + setTemplates: sinon.stub(), + block: sinon.stub() + } + + getBlockingData.returns({ + headers: { 'Content-type': 'application/json' }, + body: '{ "message": "blocked" }', + statusCode: 403 + }) + + graphql = proxyquire('../../src/appsec/graphql', { + './blocking': blocking + }) + }) + + afterEach(() => { + sinon.restore() + }) + + describe('enable', () => { + beforeEach(() => { + }) + + afterEach(() => { + graphql.disable() + sinon.restore() + }) + + it('Should subscribe to all channels', () => { + expect(graphqlMiddlewareChannel.start.hasSubscribers).to.be.false + expect(graphqlMiddlewareChannel.end.hasSubscribers).to.be.false + expect(apolloChannel.start.hasSubscribers).to.be.false + expect(apolloChannel.asyncEnd.hasSubscribers).to.be.false + expect(apolloServerCoreChannel.start.hasSubscribers).to.be.false + expect(apolloServerCoreChannel.asyncEnd.hasSubscribers).to.be.false + expect(startGraphqlResolve.hasSubscribers).to.be.false + + graphql.enable() + + expect(graphqlMiddlewareChannel.start.hasSubscribers).to.be.true + expect(graphqlMiddlewareChannel.end.hasSubscribers).to.be.true + 
expect(apolloChannel.start.hasSubscribers).to.be.true + expect(apolloChannel.asyncEnd.hasSubscribers).to.be.true + expect(apolloServerCoreChannel.start.hasSubscribers).to.be.true + expect(apolloServerCoreChannel.asyncEnd.hasSubscribers).to.be.true + expect(startGraphqlResolve.hasSubscribers).to.be.true + }) + }) + + describe('disable', () => { + it('Should unsubscribe from all channels', () => { + graphql.enable() + + expect(graphqlMiddlewareChannel.start.hasSubscribers).to.be.true + expect(graphqlMiddlewareChannel.end.hasSubscribers).to.be.true + expect(apolloChannel.start.hasSubscribers).to.be.true + expect(apolloChannel.asyncEnd.hasSubscribers).to.be.true + expect(apolloServerCoreChannel.start.hasSubscribers).to.be.true + expect(apolloServerCoreChannel.asyncEnd.hasSubscribers).to.be.true + expect(startGraphqlResolve.hasSubscribers).to.be.true + + graphql.disable() + + expect(graphqlMiddlewareChannel.start.hasSubscribers).to.be.false + expect(graphqlMiddlewareChannel.end.hasSubscribers).to.be.false + expect(apolloChannel.start.hasSubscribers).to.be.false + expect(apolloChannel.asyncEnd.hasSubscribers).to.be.false + expect(apolloServerCoreChannel.start.hasSubscribers).to.be.false + expect(apolloServerCoreChannel.asyncEnd.hasSubscribers).to.be.false + expect(startGraphqlResolve.hasSubscribers).to.be.false + }) + }) + + describe('onGraphqlStartResolve', () => { + beforeEach(() => { + sinon.stub(waf, 'run').returns(['']) + sinon.stub(storage, 'getStore').returns({ req: {} }) + sinon.stub(web, 'root').returns({}) + graphql.enable() + }) + + afterEach(() => { + sinon.restore() + graphql.disable() + }) + + it('Should not call waf if resolvers is undefined', () => { + const context = { + resolver: undefined + } + + startGraphqlResolve.publish({ context }) + + expect(waf.run).not.to.have.been.called + }) + + it('Should not call waf if resolvers is not an object', () => { + const context = { + resolver: '' + } + + startGraphqlResolve.publish({ context }) + + expect(waf.run).not.to.have.been.called + }) + + it('Should not call waf if req is unavailable', () => { + const context = {} + const resolverInfo = { + user: [ { id: '1234' } ] + } + + storage.getStore().req = undefined + + startGraphqlResolve.publish({ context, resolverInfo }) + + expect(waf.run).not.to.have.been.called + }) + + it('Should call waf if resolvers is well formatted', () => { + const context = {} + + const resolverInfo = { + user: [ { id: '1234' } ] + } + + startGraphqlResolve.publish({ context, resolverInfo }) + + expect(waf.run).to.have.been.calledOnceWithExactly( + { + [addresses.HTTP_INCOMING_GRAPHQL_RESOLVER]: resolverInfo + }, + {} + ) + }) + }) + + describe('block response', () => { + const req = {} + const res = {} + beforeEach(() => { + sinon.stub(storage, 'getStore').returns({ req, res }) + + graphql.enable() + graphqlMiddlewareChannel.start.publish({ req, res }) + apolloChannel.start.publish() + }) + + afterEach(() => { + graphqlMiddlewareChannel.end.publish({ req }) + graphql.disable() + sinon.restore() + }) + + it('Should not call abort', () => { + const context = { + abortController: { + abort: sinon.stub() + } + } + + const resolverInfo = { + user: [ { id: '1234' } ] + } + + const abortController = {} + + sinon.stub(waf, 'run').returns(['']) + + startGraphqlResolve.publish({ context, resolverInfo }) + + expect(waf.run).to.have.been.calledOnceWithExactly( + { + [addresses.HTTP_INCOMING_GRAPHQL_RESOLVER]: resolverInfo + }, + {} + ) + expect(context.abortController.abort).not.to.have.been.called + + 
apolloChannel.asyncEnd.publish({ abortController }) + + expect(blocking.getBlockingData).not.to.have.been.called + }) + + it('Should call abort', () => { + const context = { + abortController: { + abort: sinon.stub() + } + } + + const resolverInfo = { + user: [ { id: '1234' } ] + } + + const abortController = context.abortController + + sinon.stub(waf, 'run').returns(['block']) + sinon.stub(web, 'root').returns({}) + + startGraphqlResolve.publish({ context, resolverInfo }) + + expect(waf.run).to.have.been.calledOnceWithExactly( + { + [addresses.HTTP_INCOMING_GRAPHQL_RESOLVER]: resolverInfo + }, + {} + ) + expect(context.abortController.abort).to.have.been.called + const abortData = {} + apolloChannel.asyncEnd.publish({ abortController, abortData }) + + expect(blocking.getBlockingData).to.have.been.calledOnceWithExactly(req, 'graphql', {}) + }) + }) +}) diff --git a/packages/dd-trace/test/appsec/index.spec.js b/packages/dd-trace/test/appsec/index.spec.js index 14306f8203a..dea33e01faf 100644 --- a/packages/dd-trace/test/appsec/index.spec.js +++ b/packages/dd-trace/test/appsec/index.spec.js @@ -8,7 +8,6 @@ const appsec = require('../../src/appsec') const { bodyParser, cookieParser, - graphqlFinishExecute, incomingHttpRequestStart, incomingHttpRequestEnd, queryParser, @@ -21,7 +20,6 @@ const axios = require('axios') const getPort = require('get-port') const blockedTemplate = require('../../src/appsec/blocked_templates') const { storage } = require('../../../datadog-core') -const addresses = require('../../src/appsec/addresses') const telemetryMetrics = require('../../src/telemetry/metrics') describe('AppSec Index', () => { @@ -32,6 +30,7 @@ describe('AppSec Index', () => { let passport let log let appsecTelemetry + let graphql const RULES = { rules: [{ a: 1 }] } @@ -80,12 +79,18 @@ describe('AppSec Index', () => { disable: sinon.stub() } + graphql = { + enable: sinon.stub(), + disable: sinon.stub() + } + AppSec = proxyquire('../../src/appsec', { '../log': log, '../plugins/util/web': web, './blocking': blocking, './passport': passport, - './telemetry': appsecTelemetry + './telemetry': appsecTelemetry, + './graphql': graphql }) sinon.stub(fs, 'readFileSync').returns(JSON.stringify(RULES)) @@ -112,6 +117,7 @@ describe('AppSec Index', () => { expect(incomingHttpRequestStart.subscribe) .to.have.been.calledOnceWithExactly(AppSec.incomingHttpStartTranslator) expect(incomingHttpRequestEnd.subscribe).to.have.been.calledOnceWithExactly(AppSec.incomingHttpEndTranslator) + expect(graphql.enable).to.have.been.calledOnceWithExactly() }) it('should log when enable fails', () => { @@ -134,13 +140,11 @@ describe('AppSec Index', () => { expect(cookieParser.hasSubscribers).to.be.false expect(queryParser.hasSubscribers).to.be.false expect(passportVerify.hasSubscribers).to.be.false - expect(graphqlFinishExecute.hasSubscribers).to.be.false AppSec.enable(config) expect(bodyParser.hasSubscribers).to.be.true expect(cookieParser.hasSubscribers).to.be.true - expect(graphqlFinishExecute.hasSubscribers).to.be.true expect(queryParser.hasSubscribers).to.be.true expect(passportVerify.hasSubscribers).to.be.true }) @@ -183,6 +187,7 @@ describe('AppSec Index', () => { expect(incomingHttpRequestStart.unsubscribe) .to.have.been.calledOnceWithExactly(AppSec.incomingHttpStartTranslator) expect(incomingHttpRequestEnd.unsubscribe).to.have.been.calledOnceWithExactly(AppSec.incomingHttpEndTranslator) + expect(graphql.disable).to.have.been.calledOnceWithExactly() }) it('should disable AppSec when DC channels are not active', () => { @@ 
-202,7 +207,6 @@ describe('AppSec Index', () => { expect(bodyParser.hasSubscribers).to.be.false expect(cookieParser.hasSubscribers).to.be.false - expect(graphqlFinishExecute.hasSubscribers).to.be.false expect(queryParser.hasSubscribers).to.be.false expect(passportVerify.hasSubscribers).to.be.false }) @@ -546,9 +550,10 @@ describe('AppSec Index', () => { 'content-type': 'application/json', 'content-lenght': 42 }), - setHeader: sinon.stub(), + writeHead: sinon.stub(), end: sinon.stub() } + res.writeHead.returns(res) AppSec.enable(config) AppSec.incomingHttpStartTranslator({ req, res }) @@ -704,66 +709,6 @@ describe('AppSec Index', () => { expect(passport.passportTrackEvent).not.to.have.been.called }) }) - - describe('onGraphqlQueryParse', () => { - it('Should not call waf if resolvers is undefined', () => { - const resolvers = undefined - const rootSpan = {} - - sinon.stub(waf, 'run') - sinon.stub(storage, 'getStore').returns({ req: {} }) - web.root.returns(rootSpan) - - graphqlFinishExecute.publish({ resolvers }) - - expect(waf.run).not.to.have.been.called - }) - - it('Should not call waf if resolvers is not an object', () => { - const resolvers = '' - const rootSpan = {} - - sinon.stub(waf, 'run') - sinon.stub(storage, 'getStore').returns({ req: {} }) - web.root.returns(rootSpan) - - graphqlFinishExecute.publish({ resolvers }) - - expect(waf.run).not.to.have.been.called - }) - - it('Should not call waf if req is unavailable', () => { - const resolvers = { user: [ { id: '1234' } ] } - sinon.stub(waf, 'run') - sinon.stub(storage, 'getStore').returns({}) - - graphqlFinishExecute.publish({ resolvers }) - - expect(waf.run).not.to.have.been.called - }) - - it('Should call waf if resolvers is well formatted', () => { - const context = { - resolvers: { - user: [ { id: '1234' } ] - } - } - const rootSpan = {} - - sinon.stub(waf, 'run') - sinon.stub(storage, 'getStore').returns({ req: {} }) - web.root.returns(rootSpan) - - graphqlFinishExecute.publish({ context }) - - expect(waf.run).to.have.been.calledOnceWithExactly( - { - [addresses.HTTP_INCOMING_GRAPHQL_RESOLVERS]: context.resolvers - }, - {} - ) - }) - }) }) describe('Metrics', () => { diff --git a/packages/dd-trace/test/config.spec.js b/packages/dd-trace/test/config.spec.js index 3459c50e260..a7647e395f1 100644 --- a/packages/dd-trace/test/config.spec.js +++ b/packages/dd-trace/test/config.spec.js @@ -22,6 +22,8 @@ describe('Config', () => { const BLOCKED_TEMPLATE_HTML = readFileSync(BLOCKED_TEMPLATE_HTML_PATH, { encoding: 'utf8' }) const BLOCKED_TEMPLATE_JSON_PATH = require.resolve('./fixtures/config/appsec-blocked-template.json') const BLOCKED_TEMPLATE_JSON = readFileSync(BLOCKED_TEMPLATE_JSON_PATH, { encoding: 'utf8' }) + const BLOCKED_TEMPLATE_GRAPHQL_PATH = require.resolve('./fixtures/config/appsec-blocked-graphql-template.json') + const BLOCKED_TEMPLATE_GRAPHQL = readFileSync(BLOCKED_TEMPLATE_GRAPHQL_PATH, { encoding: 'utf8' }) const DD_GIT_PROPERTIES_FILE = require.resolve('./fixtures/config/git.properties') beforeEach(() => { @@ -107,6 +109,7 @@ describe('Config', () => { expect(config).to.have.nested.property('appsec.obfuscatorValueRegex').with.length(443) expect(config).to.have.nested.property('appsec.blockedTemplateHtml', undefined) expect(config).to.have.nested.property('appsec.blockedTemplateJson', undefined) + expect(config).to.have.nested.property('appsec.blockedTemplateGraphql', undefined) expect(config).to.have.nested.property('appsec.eventTracking.enabled', true) 
expect(config).to.have.nested.property('appsec.eventTracking.mode', 'safe') expect(config).to.have.nested.property('appsec.apiSecurity.enabled', false) @@ -208,6 +211,7 @@ describe('Config', () => { process.env.DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP = '.*' process.env.DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML = BLOCKED_TEMPLATE_HTML_PATH process.env.DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON = BLOCKED_TEMPLATE_JSON_PATH + process.env.DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON = BLOCKED_TEMPLATE_GRAPHQL_PATH process.env.DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING = 'extended' process.env.DD_REMOTE_CONFIGURATION_ENABLED = 'false' process.env.DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS = '42' @@ -288,6 +292,7 @@ describe('Config', () => { expect(config).to.have.nested.property('appsec.obfuscatorValueRegex', '.*') expect(config).to.have.nested.property('appsec.blockedTemplateHtml', BLOCKED_TEMPLATE_HTML) expect(config).to.have.nested.property('appsec.blockedTemplateJson', BLOCKED_TEMPLATE_JSON) + expect(config).to.have.nested.property('appsec.blockedTemplateGraphql', BLOCKED_TEMPLATE_GRAPHQL) expect(config).to.have.nested.property('appsec.eventTracking.enabled', true) expect(config).to.have.nested.property('appsec.eventTracking.mode', 'extended') expect(config).to.have.nested.property('appsec.apiSecurity.enabled', true) @@ -658,8 +663,9 @@ describe('Config', () => { process.env.DD_APPSEC_WAF_TIMEOUT = 11 process.env.DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP = '^$' process.env.DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP = '^$' - process.env.DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML = BLOCKED_TEMPLATE_JSON // note the inversion between - process.env.DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON = BLOCKED_TEMPLATE_HTML // json and html here + process.env.DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML = BLOCKED_TEMPLATE_JSON_PATH // note the inversion between + process.env.DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON = BLOCKED_TEMPLATE_HTML_PATH // json and html here + process.env.DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON = BLOCKED_TEMPLATE_JSON_PATH // json and html here process.env.DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING = 'disabled' process.env.DD_EXPERIMENTAL_API_SECURITY_ENABLED = 'false' process.env.DD_API_SECURITY_REQUEST_SAMPLE_RATE = 0.5 @@ -724,6 +730,7 @@ describe('Config', () => { obfuscatorValueRegex: '.*', blockedTemplateHtml: BLOCKED_TEMPLATE_HTML_PATH, blockedTemplateJson: BLOCKED_TEMPLATE_JSON_PATH, + blockedTemplateGraphql: BLOCKED_TEMPLATE_GRAPHQL_PATH, eventTracking: { mode: 'safe' }, @@ -777,6 +784,7 @@ describe('Config', () => { expect(config).to.have.nested.property('appsec.obfuscatorValueRegex', '.*') expect(config).to.have.nested.property('appsec.blockedTemplateHtml', BLOCKED_TEMPLATE_HTML) expect(config).to.have.nested.property('appsec.blockedTemplateJson', BLOCKED_TEMPLATE_JSON) + expect(config).to.have.nested.property('appsec.blockedTemplateGraphql', BLOCKED_TEMPLATE_GRAPHQL) expect(config).to.have.nested.property('appsec.eventTracking.enabled', true) expect(config).to.have.nested.property('appsec.eventTracking.mode', 'safe') expect(config).to.have.nested.property('appsec.apiSecurity.enabled', true) @@ -803,6 +811,7 @@ describe('Config', () => { obfuscatorValueRegex: '.*', blockedTemplateHtml: undefined, blockedTemplateJson: undefined, + blockedTemplateGraphql: undefined, eventTracking: { mode: 'disabled' }, @@ -821,6 +830,7 @@ describe('Config', () => { obfuscatorValueRegex: '^$', blockedTemplateHtml: BLOCKED_TEMPLATE_HTML_PATH, blockedTemplateJson: BLOCKED_TEMPLATE_JSON_PATH, + blockedTemplateGraphql: 
BLOCKED_TEMPLATE_GRAPHQL_PATH, eventTracking: { mode: 'safe' }, @@ -842,6 +852,7 @@ describe('Config', () => { obfuscatorValueRegex: '.*', blockedTemplateHtml: undefined, blockedTemplateJson: undefined, + blockedTemplateGraphql: undefined, eventTracking: { enabled: false, mode: 'disabled' @@ -1122,19 +1133,22 @@ describe('Config', () => { enabled: true, rules: 'path/to/rules.json', blockedTemplateHtml: 'DOES_NOT_EXIST.html', - blockedTemplateJson: 'DOES_NOT_EXIST.json' + blockedTemplateJson: 'DOES_NOT_EXIST.json', + blockedTemplateGraphql: 'DOES_NOT_EXIST.json' } }) - expect(log.error).to.be.callCount(2) + expect(log.error).to.be.callCount(3) expect(log.error.firstCall).to.have.been.calledWithExactly(error) expect(log.error.secondCall).to.have.been.calledWithExactly(error) + expect(log.error.thirdCall).to.have.been.calledWithExactly(error) expect(config.appsec.enabled).to.be.true expect(config.appsec.rules).to.eq('path/to/rules.json') expect(config.appsec.customRulesProvided).to.be.true expect(config.appsec.blockedTemplateHtml).to.be.undefined expect(config.appsec.blockedTemplateJson).to.be.undefined + expect(config.appsec.blockedTemplateGraphql).to.be.undefined }) context('auto configuration w/ unix domain sockets', () => { diff --git a/packages/dd-trace/test/fixtures/config/appsec-blocked-graphql-template.json b/packages/dd-trace/test/fixtures/config/appsec-blocked-graphql-template.json new file mode 100644 index 00000000000..e792d611dd7 --- /dev/null +++ b/packages/dd-trace/test/fixtures/config/appsec-blocked-graphql-template.json @@ -0,0 +1,5 @@ +{ + "errors": { + "message": "blocked" + } +} diff --git a/packages/dd-trace/test/plugins/externals.json b/packages/dd-trace/test/plugins/externals.json index 01f65e0551e..8cbffb12ed2 100644 --- a/packages/dd-trace/test/plugins/externals.json +++ b/packages/dd-trace/test/plugins/externals.json @@ -99,6 +99,42 @@ "versions": ["^15.2.0"] } ], + "apollo-server-core": [ + { + "name": "fastify", + "versions": [">=3"] + }, + { + "name": "express", + "versions": [">=4"] + }, + { + "name": "apollo-server-fastify", + "versions": [">=3"] + }, + { + "name": "apollo-server-express", + "versions": [">=3"] + }, + { + "name": "graphql", + "versions": ["^15.2.0"] + } + ], + "apollo-server": [ + { + "name": "express", + "versions": [">=4"] + }, + { + "name": "@apollo/server", + "versions": [">=4"] + }, + { + "name": "graphql", + "versions": ["^16.6.0"] + } + ], "grpc": [ { "name": "@grpc/proto-loader", From 9d8e97f06f4d386356ada9dd46b476a1f6fd4845 Mon Sep 17 00:00:00 2001 From: Nicolas Savoire Date: Wed, 20 Dec 2023 11:36:31 +0100 Subject: [PATCH 126/147] [profiling] Add thread id labels to heap and wall profiles (#3888) * Add thread id to heap and wall profiles * Allow generateLabels to be called without arguments In wall profile, if generateLabels is called without arguments, return thread labels. 
--- integration-tests/profiler.spec.js | 8 ++++- .../src/profiling/profilers/events.js | 4 +-- .../src/profiling/profilers/shared.js | 36 +++++++++++++++++-- .../dd-trace/src/profiling/profilers/space.js | 3 +- .../dd-trace/src/profiling/profilers/wall.js | 29 ++++++++------- 5 files changed, 61 insertions(+), 19 deletions(-) diff --git a/integration-tests/profiler.spec.js b/integration-tests/profiler.spec.js index 10903cad81f..8be691fe592 100644 --- a/integration-tests/profiler.spec.js +++ b/integration-tests/profiler.spec.js @@ -188,9 +188,11 @@ describe('profiler', () => { const rootSpanKey = strings.dedup('local root span id') const endpointKey = strings.dedup('trace endpoint') const threadNameKey = strings.dedup('thread name') + const threadIdKey = strings.dedup('thread id') + const osThreadIdKey = strings.dedup('os thread id') const threadNameValue = strings.dedup('Main Event Loop') for (const sample of prof.sample) { - let ts, spanId, rootSpanId, endpoint, threadName + let ts, spanId, rootSpanId, endpoint, threadName, threadId, osThreadId for (const label of sample.label) { switch (label.key) { case tsKey: ts = label.num; break @@ -198,11 +200,15 @@ describe('profiler', () => { case rootSpanKey: rootSpanId = label.str; break case endpointKey: endpoint = label.str; break case threadNameKey: threadName = label.str; break + case threadIdKey: threadId = label.str; break + case osThreadIdKey: osThreadId = label.str; break default: assert.fail(`Unexpected label key ${strings.dedup(label.key)}`) } } // Timestamp must be defined and be between process start and end time assert.isDefined(ts) + assert.isNumber(osThreadId) + assert.equal(threadId, strings.dedup('0')) assert.isTrue(ts <= procEnd) assert.isTrue(ts >= procStart) // Thread name must be defined and exactly equal "Main Event Loop" diff --git a/packages/dd-trace/src/profiling/profilers/events.js b/packages/dd-trace/src/profiling/profilers/events.js index 3e1f4cd9a07..03fa78300e8 100644 --- a/packages/dd-trace/src/profiling/profilers/events.js +++ b/packages/dd-trace/src/profiling/profilers/events.js @@ -1,5 +1,5 @@ const { performance, constants, PerformanceObserver } = require('node:perf_hooks') -const { END_TIMESTAMP } = require('./shared') +const { END_TIMESTAMP_LABEL } = require('./shared') const semver = require('semver') const { Function, Label, Line, Location, Profile, Sample, StringTable, ValueType } = require('pprof-format') const pprof = require('@datadog/pprof/') @@ -202,7 +202,7 @@ class EventsProfiler { decorator.eventTypeLabel = labelFromStrStr(stringTable, 'event', eventType) decorators[eventType] = decorator } - const timestampLabelKey = stringTable.dedup(END_TIMESTAMP) + const timestampLabelKey = stringTable.dedup(END_TIMESTAMP_LABEL) let durationFrom = Number.POSITIVE_INFINITY let durationTo = 0 diff --git a/packages/dd-trace/src/profiling/profilers/shared.js b/packages/dd-trace/src/profiling/profilers/shared.js index 49acc6ced61..4337a80ae29 100644 --- a/packages/dd-trace/src/profiling/profilers/shared.js +++ b/packages/dd-trace/src/profiling/profilers/shared.js @@ -2,8 +2,38 @@ const { isMainThread, threadId } = require('node:worker_threads') +const END_TIMESTAMP_LABEL = 'end_timestamp_ns' +const THREAD_NAME_LABEL = 'thread name' +const OS_THREAD_ID_LABEL = 'os thread id' +const THREAD_ID_LABEL = 'thread id' +const threadNamePrefix = isMainThread ? 
'Main' : `Worker #${threadId}` +const eventLoopThreadName = `${threadNamePrefix} Event Loop` + +function getThreadLabels () { + const pprof = require('@datadog/pprof') + const nativeThreadId = pprof.getNativeThreadId() + return { + [THREAD_NAME_LABEL]: eventLoopThreadName, + [THREAD_ID_LABEL]: `${threadId}`, + [OS_THREAD_ID_LABEL]: `${nativeThreadId}` + } +} + +function cacheThreadLabels () { + let labels + return () => { + if (!labels) { + labels = getThreadLabels() + } + return labels + } +} + module.exports = { - END_TIMESTAMP: 'end_timestamp_ns', - THREAD_NAME: 'thread name', - threadNamePrefix: isMainThread ? 'Main' : `Worker #${threadId}` + END_TIMESTAMP_LABEL, + THREAD_NAME_LABEL, + THREAD_ID_LABEL, + threadNamePrefix, + eventLoopThreadName, + getThreadLabels: cacheThreadLabels() } diff --git a/packages/dd-trace/src/profiling/profilers/space.js b/packages/dd-trace/src/profiling/profilers/space.js index 767136603ba..4e654689e54 100644 --- a/packages/dd-trace/src/profiling/profilers/space.js +++ b/packages/dd-trace/src/profiling/profilers/space.js @@ -1,6 +1,7 @@ 'use strict' const { oomExportStrategies } = require('../constants') +const { getThreadLabels } = require('./shared') function strategiesToCallbackMode (strategies, callbackMode) { return strategies.includes(oomExportStrategies.ASYNC_CALLBACK) ? callbackMode.Async : 0 @@ -33,7 +34,7 @@ class NativeSpaceProfiler { } profile () { - return this._pprof.heap.profile(undefined, this._mapper) + return this._pprof.heap.profile(undefined, this._mapper, getThreadLabels) } encode (profile) { diff --git a/packages/dd-trace/src/profiling/profilers/wall.js b/packages/dd-trace/src/profiling/profilers/wall.js index 991a44efd0a..a5947923026 100644 --- a/packages/dd-trace/src/profiling/profilers/wall.js +++ b/packages/dd-trace/src/profiling/profilers/wall.js @@ -7,13 +7,12 @@ const { HTTP_METHOD, HTTP_ROUTE, RESOURCE_NAME, SPAN_TYPE } = require('../../../ const { WEB } = require('../../../../../ext/types') const runtimeMetrics = require('../../runtime_metrics') const telemetryMetrics = require('../../telemetry/metrics') -const { END_TIMESTAMP, THREAD_NAME, threadNamePrefix } = require('./shared') +const { END_TIMESTAMP_LABEL, getThreadLabels } = require('./shared') const beforeCh = dc.channel('dd-trace:storage:before') const enterCh = dc.channel('dd-trace:storage:enter') const spanFinishCh = dc.channel('dd-trace:span:finish') const profilerTelemetryMetrics = telemetryMetrics.manager.namespace('profilers') -const threadName = `${threadNamePrefix} Event Loop` const MemoizedWebTags = Symbol('NativeWallProfiler.MemoizedWebTags') @@ -96,12 +95,9 @@ class NativeWallProfiler { this._enter = this._enter.bind(this) this._spanFinished = this._spanFinished.bind(this) } - this._generateLabels = this._generateLabels.bind(this) - } else { - // Explicitly assigning, to express the intent that this is meant to be - // undefined when passed to pprof.time.stop() when not using sample contexts. - this._generateLabels = undefined } + this._generateLabels = this._generateLabels.bind(this) + this._logger = options.logger this._started = false } @@ -239,12 +235,21 @@ class NativeWallProfiler { return profile } - _generateLabels ({ context: { spanId, rootSpanId, webTags, endpoint }, timestamp }) { - const labels = this._timelineEnabled ? { - [THREAD_NAME]: threadName, + _generateLabels (context) { + if (context == null) { + // generateLabels is also called for samples without context. + // In that case just return thread labels. 
+ return getThreadLabels() + } + + const labels = { ...getThreadLabels() } + + const { context: { spanId, rootSpanId, webTags, endpoint }, timestamp } = context + + if (this._timelineEnabled) { // Incoming timestamps are in microseconds, we emit nanos. - [END_TIMESTAMP]: timestamp * 1000n - } : {} + labels[END_TIMESTAMP_LABEL] = timestamp * 1000n + } if (spanId) { labels['span id'] = spanId From c8e88ee96e330bd397e75184fe158d2b08bc3af7 Mon Sep 17 00:00:00 2001 From: Igor Unanua Date: Wed, 20 Dec 2023 12:22:25 +0100 Subject: [PATCH 127/147] API security sample rate via RC (#3868) * New RC ASM_API_SECURITY_SAMPLE_RATE capability * Do not report ASM_API_SECURITY_SAMPLE_RATE capability if apiSecurity is not enabled * Change Activation constants case * Update ASM_API_SECURITY_SAMPLE_RATE capability when enabling RemoteConfig * Apply same coertion logic as in config.js * Clean up * Do not turn off ASM_API_SECURITY_SAMPLE_RATE when disabling waf update * Include api_security_sampler test case * Fix test comment * remove NOTE --- packages/dd-trace/src/appsec/activation.js | 29 +++ .../src/appsec/api_security_sampler.js | 48 +++++ packages/dd-trace/src/appsec/index.js | 17 +- .../src/appsec/remote_config/capabilities.js | 3 +- .../src/appsec/remote_config/index.js | 51 ++++-- .../dd-trace/test/appsec/activation.spec.js | 41 +++++ .../test/appsec/api_security_sampler.spec.js | 71 ++++++++ .../test/appsec/remote_config/index.spec.js | 168 ++++++++++++++++-- 8 files changed, 390 insertions(+), 38 deletions(-) create mode 100644 packages/dd-trace/src/appsec/activation.js create mode 100644 packages/dd-trace/src/appsec/api_security_sampler.js create mode 100644 packages/dd-trace/test/appsec/activation.spec.js create mode 100644 packages/dd-trace/test/appsec/api_security_sampler.spec.js diff --git a/packages/dd-trace/src/appsec/activation.js b/packages/dd-trace/src/appsec/activation.js new file mode 100644 index 00000000000..8ed6a26fa54 --- /dev/null +++ b/packages/dd-trace/src/appsec/activation.js @@ -0,0 +1,29 @@ +'use strict' + +const Activation = { + ONECLICK: 'OneClick', + ENABLED: 'Enabled', + DISABLED: 'Disabled', + + fromConfig (config) { + switch (config.appsec.enabled) { + // ASM is activated by an env var DD_APPSEC_ENABLED=true + case true: + return Activation.ENABLED + + // ASM is disabled by an env var DD_APPSEC_ENABLED=false + case false: + return Activation.DISABLED + + // ASM is activated by one click remote config + case undefined: + return Activation.ONECLICK + + // Any other value should never occur + default: + return Activation.DISABLED + } + } +} + +module.exports = Activation diff --git a/packages/dd-trace/src/appsec/api_security_sampler.js b/packages/dd-trace/src/appsec/api_security_sampler.js new file mode 100644 index 00000000000..3d92288d1c1 --- /dev/null +++ b/packages/dd-trace/src/appsec/api_security_sampler.js @@ -0,0 +1,48 @@ +'use strict' + +const log = require('../log') + +let enabled +let requestSampling + +function configure ({ apiSecurity }) { + enabled = apiSecurity.enabled + setRequestSampling(apiSecurity.requestSampling) +} + +function disable () { + enabled = false +} + +function setRequestSampling (sampling) { + requestSampling = parseRequestSampling(sampling) +} + +function parseRequestSampling (requestSampling) { + let parsed = parseFloat(requestSampling) + + if (isNaN(parsed)) { + log.warn(`Incorrect API Security request sampling value: ${requestSampling}`) + + parsed = 0 + } else { + parsed = Math.min(1, Math.max(0, parsed)) + } + + return parsed +} + 
+function sampleRequest () { + if (!enabled || !requestSampling) { + return false + } + + return Math.random() <= requestSampling +} + +module.exports = { + configure, + disable, + setRequestSampling, + sampleRequest +} diff --git a/packages/dd-trace/src/appsec/index.js b/packages/dd-trace/src/appsec/index.js index f072cc044b0..deb6df86e3e 100644 --- a/packages/dd-trace/src/appsec/index.js +++ b/packages/dd-trace/src/appsec/index.js @@ -17,6 +17,7 @@ const waf = require('./waf') const addresses = require('./addresses') const Reporter = require('./reporter') const appsecTelemetry = require('./telemetry') +const apiSecuritySampler = require('./api_security_sampler') const web = require('../plugins/util/web') const { extractIp } = require('../plugins/util/ip_extractor') const { HTTP_CLIENT_IP } = require('../../../../ext/tags') @@ -28,14 +29,6 @@ const graphql = require('./graphql') let isEnabled = false let config -function sampleRequest ({ enabled, requestSampling }) { - if (!enabled || !requestSampling) { - return false - } - - return Math.random() <= requestSampling -} - function enable (_config) { if (isEnabled) return @@ -51,6 +44,8 @@ function enable (_config) { Reporter.setRateLimit(_config.appsec.rateLimit) + apiSecuritySampler.configure(_config.appsec) + incomingHttpRequestStart.subscribe(incomingHttpStartTranslator) incomingHttpRequestEnd.subscribe(incomingHttpEndTranslator) bodyParser.subscribe(onRequestBodyParsed) @@ -98,7 +93,7 @@ function incomingHttpStartTranslator ({ req, res, abortController }) { payload[addresses.HTTP_CLIENT_IP] = clientIp } - if (sampleRequest(config.appsec.apiSecurity)) { + if (apiSecuritySampler.sampleRequest()) { payload[addresses.WAF_CONTEXT_PROCESSOR] = { 'extract-schema': true } } @@ -195,7 +190,7 @@ function onRequestCookieParser ({ req, res, abortController, cookies }) { function onPassportVerify ({ credentials, user }) { const store = storage.getStore() - const rootSpan = store && store.req && web.root(store.req) + const rootSpan = store?.req && web.root(store.req) if (!rootSpan) { log.warn('No rootSpan found in onPassportVerify') @@ -224,6 +219,8 @@ function disable () { remoteConfig.disableWafUpdate() + apiSecuritySampler.disable() + // Channel#unsubscribe() is undefined for non active channels if (bodyParser.hasSubscribers) bodyParser.unsubscribe(onRequestBodyParsed) if (incomingHttpRequestStart.hasSubscribers) incomingHttpRequestStart.unsubscribe(incomingHttpStartTranslator) diff --git a/packages/dd-trace/src/appsec/remote_config/capabilities.js b/packages/dd-trace/src/appsec/remote_config/capabilities.js index 94141438db5..6032b6543c2 100644 --- a/packages/dd-trace/src/appsec/remote_config/capabilities.js +++ b/packages/dd-trace/src/appsec/remote_config/capabilities.js @@ -9,5 +9,6 @@ module.exports = { ASM_USER_BLOCKING: 1n << 7n, ASM_CUSTOM_RULES: 1n << 8n, ASM_CUSTOM_BLOCKING_RESPONSE: 1n << 9n, - ASM_TRUSTED_IPS: 1n << 10n + ASM_TRUSTED_IPS: 1n << 10n, + ASM_API_SECURITY_SAMPLE_RATE: 1n << 11n } diff --git a/packages/dd-trace/src/appsec/remote_config/index.js b/packages/dd-trace/src/appsec/remote_config/index.js index f7c6118598a..08d912d22e1 100644 --- a/packages/dd-trace/src/appsec/remote_config/index.js +++ b/packages/dd-trace/src/appsec/remote_config/index.js @@ -1,38 +1,59 @@ 'use strict' +const Activation = require('../activation') + const RemoteConfigManager = require('./manager') const RemoteConfigCapabilities = require('./capabilities') +const apiSecuritySampler = require('../api_security_sampler') let rc function enable (config) 
{ rc = new RemoteConfigManager(config) - if (config.appsec.enabled === undefined) { // only activate ASM_FEATURES when conf is not set locally - rc.updateCapabilities(RemoteConfigCapabilities.ASM_ACTIVATION, true) + const activation = Activation.fromConfig(config) + + if (activation !== Activation.DISABLED) { + if (activation === Activation.ONECLICK) { + rc.updateCapabilities(RemoteConfigCapabilities.ASM_ACTIVATION, true) + } - rc.on('ASM_FEATURES', (action, conf) => { - if (conf && conf.asm && typeof conf.asm.enabled === 'boolean') { - let shouldEnable + if (config.appsec.apiSecurity?.enabled) { + rc.updateCapabilities(RemoteConfigCapabilities.ASM_API_SECURITY_SAMPLE_RATE, true) + } - if (action === 'apply' || action === 'modify') { - shouldEnable = conf.asm.enabled // take control - } else { - shouldEnable = config.appsec.enabled // give back control to local config - } + rc.on('ASM_FEATURES', (action, rcConfig) => { + if (!rcConfig) return - if (shouldEnable) { - require('..').enable(config) - } else { - require('..').disable() - } + if (activation === Activation.ONECLICK) { + enableOrDisableAppsec(action, rcConfig, config) } + + apiSecuritySampler.setRequestSampling(rcConfig.api_security?.request_sample_rate) }) } return rc } +function enableOrDisableAppsec (action, rcConfig, config) { + if (typeof rcConfig.asm?.enabled === 'boolean') { + let shouldEnable + + if (action === 'apply' || action === 'modify') { + shouldEnable = rcConfig.asm.enabled // take control + } else { + shouldEnable = config.appsec.enabled // give back control to local config + } + + if (shouldEnable) { + require('..').enable(config) + } else { + require('..').disable() + } + } +} + function enableWafUpdate (appsecConfig) { if (rc && appsecConfig && !appsecConfig.customRulesProvided) { // dirty require to make startup faster for serverless diff --git a/packages/dd-trace/test/appsec/activation.spec.js b/packages/dd-trace/test/appsec/activation.spec.js new file mode 100644 index 00000000000..7ebf2ee599f --- /dev/null +++ b/packages/dd-trace/test/appsec/activation.spec.js @@ -0,0 +1,41 @@ +'use strict' + +const Activation = require('../../src/appsec/activation') + +describe('Appsec Activation', () => { + let config + + beforeEach(() => { + config = { + appsec: {} + } + }) + + it('should return ONECLICK with undefined value', () => { + config.appsec.enabled = undefined + const activation = Activation.fromConfig(config) + + expect(activation).to.equal(Activation.ONECLICK) + }) + + it('should return ENABLED with true value', () => { + config.appsec.enabled = true + const activation = Activation.fromConfig(config) + + expect(activation).to.equal(Activation.ENABLED) + }) + + it('should return DISABLED with false value', () => { + config.appsec.enabled = false + const activation = Activation.fromConfig(config) + + expect(activation).to.equal(Activation.DISABLED) + }) + + it('should return DISABLED with invalid value', () => { + config.appsec.enabled = 'invalid' + const activation = Activation.fromConfig(config) + + expect(activation).to.equal(Activation.DISABLED) + }) +}) diff --git a/packages/dd-trace/test/appsec/api_security_sampler.spec.js b/packages/dd-trace/test/appsec/api_security_sampler.spec.js new file mode 100644 index 00000000000..e36e588ba39 --- /dev/null +++ b/packages/dd-trace/test/appsec/api_security_sampler.spec.js @@ -0,0 +1,71 @@ +'use strict' + +const apiSecuritySampler = require('../../src/appsec/api_security_sampler') + +describe('Api Security Sampler', () => { + let config + + beforeEach(() => { + 
config = { + apiSecurity: { + enabled: true, + requestSampling: 1 + } + } + + sinon.stub(Math, 'random').returns(0.3) + }) + + afterEach(sinon.restore) + + describe('sampleRequest', () => { + it('should sample request if enabled and sampling 1', () => { + apiSecuritySampler.configure(config) + + expect(apiSecuritySampler.sampleRequest()).to.true + }) + + it('should not sample request if enabled and sampling 0', () => { + config.apiSecurity.requestSampling = 0 + apiSecuritySampler.configure(config) + + expect(apiSecuritySampler.sampleRequest()).to.false + }) + + it('should sample request if enabled and sampling greater than random', () => { + config.apiSecurity.requestSampling = 0.5 + + apiSecuritySampler.configure(config) + + expect(apiSecuritySampler.sampleRequest()).to.true + }) + + it('should not sample request if enabled and sampling less than random', () => { + config.apiSecurity.requestSampling = 0.1 + + apiSecuritySampler.configure(config) + + expect(apiSecuritySampler.sampleRequest()).to.false + }) + + it('should not sample request if incorrect config value', () => { + config.apiSecurity.requestSampling = NaN + + apiSecuritySampler.configure(config) + + expect(apiSecuritySampler.sampleRequest()).to.false + }) + + it('should sample request according to the config', () => { + config.apiSecurity.requestSampling = 1 + + apiSecuritySampler.configure(config) + + expect(apiSecuritySampler.sampleRequest()).to.true + + apiSecuritySampler.setRequestSampling(0) + + expect(apiSecuritySampler.sampleRequest()).to.false + }) + }) +}) diff --git a/packages/dd-trace/test/appsec/remote_config/index.spec.js b/packages/dd-trace/test/appsec/remote_config/index.spec.js index 9712463923f..6287c693ddc 100644 --- a/packages/dd-trace/test/appsec/remote_config/index.spec.js +++ b/packages/dd-trace/test/appsec/remote_config/index.spec.js @@ -9,6 +9,7 @@ let RemoteConfigManager let RuleManager let appsec let remoteConfig +let apiSecuritySampler describe('Remote Config index', () => { beforeEach(() => { @@ -30,6 +31,11 @@ describe('Remote Config index', () => { updateWafFromRC: sinon.stub() } + apiSecuritySampler = { + configure: sinon.stub(), + setRequestSampling: sinon.stub() + } + appsec = { enable: sinon.spy(), disable: sinon.spy() @@ -38,6 +44,7 @@ describe('Remote Config index', () => { remoteConfig = proxyquire('../src/appsec/remote_config', { './manager': RemoteConfigManager, '../rule_manager': RuleManager, + '../api_security_sampler': apiSecuritySampler, '..': appsec }) }) @@ -54,7 +61,18 @@ describe('Remote Config index', () => { expect(rc.on.firstCall.args[1]).to.be.a('function') }) - it('should not listen to remote config when appsec is explicitly configured', () => { + it('should listen to remote config when appsec is explicitly configured as enabled=true', () => { + config.appsec = { enabled: true } + + remoteConfig.enable(config) + + expect(RemoteConfigManager).to.have.been.calledOnceWithExactly(config) + expect(rc.updateCapabilities).to.not.have.been.called + expect(rc.on).to.have.been.calledOnceWith('ASM_FEATURES') + expect(rc.on.firstCall.args[1]).to.be.a('function') + }) + + it('should not listen to remote config when appsec is explicitly configured as enabled=false', () => { config.appsec = { enabled: false } remoteConfig.enable(config) @@ -64,6 +82,30 @@ describe('Remote Config index', () => { expect(rc.on).to.not.have.been.called }) + it('should listen ASM_API_SECURITY_SAMPLE_RATE when appsec.enabled=undefined and appSecurity.enabled=true', () => { + config.appsec = { enabled: undefined, 
apiSecurity: { enabled: true } } + + remoteConfig.enable(config) + + expect(RemoteConfigManager).to.have.been.calledOnceWithExactly(config) + expect(rc.updateCapabilities).to.have.been.calledTwice + expect(rc.updateCapabilities.firstCall) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_ACTIVATION, true) + expect(rc.updateCapabilities.secondCall) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_API_SECURITY_SAMPLE_RATE, true) + }) + + it('should listen ASM_API_SECURITY_SAMPLE_RATE when appsec.enabled=true and appSecurity.enabled=true', () => { + config.appsec = { enabled: true, apiSecurity: { enabled: true } } + + remoteConfig.enable(config) + + expect(RemoteConfigManager).to.have.been.calledOnceWithExactly(config) + expect(rc.updateCapabilities).to.have.been.calledOnce + expect(rc.updateCapabilities.firstCall) + .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_API_SECURITY_SAMPLE_RATE, true) + }) + describe('ASM_FEATURES remote config listener', () => { let listener @@ -100,6 +142,106 @@ describe('Remote Config index', () => { expect(appsec.disable).to.not.have.been.called }) }) + + describe('API Security Request Sampling', () => { + describe('OneClick', () => { + let listener + + beforeEach(() => { + config = { + appsec: { + enabled: undefined, + apiSecurity: { + requestSampling: 0.1 + } + } + } + + remoteConfig.enable(config) + + listener = rc.on.firstCall.args[1] + }) + + it('should update apiSecuritySampler config', () => { + listener('apply', { + api_security: { + request_sample_rate: 0.5 + } + }) + + expect(apiSecuritySampler.setRequestSampling).to.be.calledOnceWithExactly(0.5) + }) + + it('should update apiSecuritySampler config and disable it', () => { + listener('apply', { + api_security: { + request_sample_rate: 0 + } + }) + + expect(apiSecuritySampler.setRequestSampling).to.be.calledOnceWithExactly(0) + }) + + it('should not update apiSecuritySampler config with values greater than 1', () => { + listener('apply', { + api_security: { + request_sample_rate: 5 + } + }) + + expect(apiSecuritySampler.configure).to.not.be.called + }) + + it('should not update apiSecuritySampler config with values less than 0', () => { + listener('apply', { + api_security: { + request_sample_rate: -0.4 + } + }) + + expect(apiSecuritySampler.configure).to.not.be.called + }) + + it('should not update apiSecuritySampler config with incorrect values', () => { + listener('apply', { + api_security: { + request_sample_rate: 'not_a_number' + } + }) + + expect(apiSecuritySampler.configure).to.not.be.called + }) + }) + + describe('Enabled', () => { + let listener + + beforeEach(() => { + config = { + appsec: { + enabled: true, + apiSecurity: { + requestSampling: 0.1 + } + } + } + + remoteConfig.enable(config) + + listener = rc.on.firstCall.args[1] + }) + + it('should update config apiSecurity.requestSampling property value', () => { + listener('apply', { + api_security: { + request_sample_rate: 0.5 + } + }) + + expect(apiSecuritySampler.setRequestSampling).to.be.calledOnceWithExactly(0.5) + }) + }) + }) }) describe('enableWafUpdate', () => { @@ -118,7 +260,7 @@ describe('Remote Config index', () => { remoteConfig.enableWafUpdate(config.appsec) expect(rc.updateCapabilities).to.not.have.been.called - expect(rc.on).to.not.have.been.called + expect(rc.on).to.have.been.called }) it('should enable when using default rules', () => { @@ -144,11 +286,12 @@ describe('Remote Config index', () => { expect(rc.updateCapabilities.getCall(7)) 
.to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_TRUSTED_IPS, true) - expect(rc.on.callCount).to.be.equal(4) - expect(rc.on.getCall(0)).to.have.been.calledWith('ASM_DATA') - expect(rc.on.getCall(1)).to.have.been.calledWith('ASM_DD') - expect(rc.on.getCall(2)).to.have.been.calledWith('ASM') - expect(rc.on.getCall(3)).to.have.been.calledWithExactly(kPreUpdate, RuleManager.updateWafFromRC) + expect(rc.on.callCount).to.be.equal(5) + expect(rc.on.getCall(0)).to.have.been.calledWith('ASM_FEATURES') + expect(rc.on.getCall(1)).to.have.been.calledWith('ASM_DATA') + expect(rc.on.getCall(2)).to.have.been.calledWith('ASM_DD') + expect(rc.on.getCall(3)).to.have.been.calledWith('ASM') + expect(rc.on.getCall(4)).to.have.been.calledWithExactly(kPreUpdate, RuleManager.updateWafFromRC) }) it('should activate if appsec is manually enabled', () => { @@ -174,11 +317,12 @@ describe('Remote Config index', () => { expect(rc.updateCapabilities.getCall(7)) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_TRUSTED_IPS, true) - expect(rc.on.callCount).to.be.equal(4) - expect(rc.on.getCall(0)).to.have.been.calledWith('ASM_DATA') - expect(rc.on.getCall(1)).to.have.been.calledWith('ASM_DD') - expect(rc.on.getCall(2)).to.have.been.calledWith('ASM') - expect(rc.on.getCall(3)).to.have.been.calledWithExactly(kPreUpdate, RuleManager.updateWafFromRC) + expect(rc.on.callCount).to.be.equal(5) + expect(rc.on.getCall(0)).to.have.been.calledWith('ASM_FEATURES') + expect(rc.on.getCall(1)).to.have.been.calledWith('ASM_DATA') + expect(rc.on.getCall(2)).to.have.been.calledWith('ASM_DD') + expect(rc.on.getCall(3)).to.have.been.calledWith('ASM') + expect(rc.on.getCall(4)).to.have.been.calledWithExactly(kPreUpdate, RuleManager.updateWafFromRC) }) it('should activate if appsec enabled is not defined', () => { From 2407515660d39ca47db302a9233442749f501217 Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Wed, 20 Dec 2023 15:39:36 +0100 Subject: [PATCH 128/147] PROF-8829: Fix recording times (#3891) * Ensure recording start time of next profile is the same as the recording end time of the current profile. 
* Pass the start-end range to profilers so they can use it * Use passed-in start-end instead of internal duration computations * Use end date to cut off late events for the next profile --- packages/dd-trace/src/profiling/profiler.js | 13 +++++----- .../src/profiling/profilers/events.js | 25 +++++++++++-------- 2 files changed, 22 insertions(+), 16 deletions(-) diff --git a/packages/dd-trace/src/profiling/profiler.js b/packages/dd-trace/src/profiling/profiler.js index 4e5882189a9..1f7a1f36a1b 100644 --- a/packages/dd-trace/src/profiling/profiler.js +++ b/packages/dd-trace/src/profiling/profiler.js @@ -61,6 +61,7 @@ class Profiler extends EventEmitter { } try { + const start = new Date() for (const profiler of config.profilers) { // TODO: move this out of Profiler when restoring sourcemap support profiler.start({ @@ -70,7 +71,7 @@ class Profiler extends EventEmitter { this._logger.debug(`Started ${profiler.type} profiler`) } - this._capture(this._timeoutInterval) + this._capture(this._timeoutInterval, start) return true } catch (e) { this._logger.error(e) @@ -116,9 +117,9 @@ class Profiler extends EventEmitter { return this } - _capture (timeout) { + _capture (timeout, start) { if (!this._enabled) return - this._lastStart = new Date() + this._lastStart = start if (!this._timer || timeout !== this._timeoutInterval) { this._timer = setTimeout(() => this._collect(snapshotKinds.PERIODIC), timeout) this._timer.unref() @@ -138,7 +139,7 @@ class Profiler extends EventEmitter { try { // collect profiles synchronously so that profilers can be safely stopped asynchronously for (const profiler of this._config.profilers) { - const profile = profiler.profile() + const profile = profiler.profile(start, end) if (!profile) continue profiles.push({ profiler, profile }) } @@ -154,7 +155,7 @@ class Profiler extends EventEmitter { }) } - this._capture(this._timeoutInterval) + this._capture(this._timeoutInterval, end) await this._submit(encodedProfiles, start, end, snapshotKind) this._logger.debug('Submitted profiles') } catch (err) { @@ -201,7 +202,7 @@ class ServerlessProfiler extends Profiler { await super._collect(snapshotKind) } else { this._profiledIntervals += 1 - this._capture(this._timeoutInterval) + this._capture(this._timeoutInterval, new Date()) // Don't submit profile until 65 (flushAfterIntervals) intervals have elapsed } } diff --git a/packages/dd-trace/src/profiling/profilers/events.js b/packages/dd-trace/src/profiling/profilers/events.js index 03fa78300e8..5c743bb96b2 100644 --- a/packages/dd-trace/src/profiling/profilers/events.js +++ b/packages/dd-trace/src/profiling/profilers/events.js @@ -174,7 +174,7 @@ class EventsProfiler { } } - profile () { + profile (startDate, endDate) { if (this.entries.length === 0) { // No events in the period; don't produce a profile return null @@ -204,10 +204,9 @@ class EventsProfiler { } const timestampLabelKey = stringTable.dedup(END_TIMESTAMP_LABEL) - let durationFrom = Number.POSITIVE_INFINITY - let durationTo = 0 const dateOffset = BigInt(Math.round(performance.timeOrigin * MS_TO_NS)) - + const lateEntries = [] + const perfEndDate = endDate.getTime() - performance.timeOrigin const samples = this.entries.map((item) => { const decorator = decorators[item.entryType] if (!decorator) { @@ -216,9 +215,15 @@ class EventsProfiler { return null } const { startTime, duration } = item + if (startTime >= perfEndDate) { + // An event past the current recording end date; save it for the next + // profile. 
Not supposed to happen as long as there's no async activity + // between capture of the endDate value in profiler.js _collect() and + // here, but better be safe than sorry. + lateEntries.push(item) + return null + } const endTime = startTime + duration - if (durationFrom > startTime) durationFrom = startTime - if (durationTo < endTime) durationTo = endTime const sampleInput = { value: [Math.round(duration * MS_TO_NS)], locationId, @@ -231,7 +236,7 @@ class EventsProfiler { return new Sample(sampleInput) }).filter(v => v) - this.entries = [] + this.entries = lateEntries const timeValueType = new ValueType({ type: stringTable.dedup(pprofValueType), @@ -240,10 +245,10 @@ class EventsProfiler { return new Profile({ sampleType: [timeValueType], - timeNanos: dateOffset + BigInt(Math.round(durationFrom * MS_TO_NS)), + timeNanos: endDate.getTime() * MS_TO_NS, periodType: timeValueType, - period: this._flushIntervalNanos, - durationNanos: Math.max(0, Math.round((durationTo - durationFrom) * MS_TO_NS)), + period: 1, + durationNanos: (endDate.getTime() - startDate.getTime()) * MS_TO_NS, sample: samples, location: locations, function: functions, From c4dfa6531797252165f9d5b93289a20d254f5c27 Mon Sep 17 00:00:00 2001 From: Stephen Belanger Date: Wed, 20 Dec 2023 22:57:10 +0800 Subject: [PATCH 129/147] Revert "[core] Ensure Explicit Timeouts from Underlying Request Socket are Recorded as Errors When Using Node 20 (#3853)" (#3896) This reverts commit 59c8ea4e7c4dd025e6a5f2ec0e9f939e44f8a70a. --- .../src/http/client.js | 16 ++------- packages/datadog-plugin-http/src/client.js | 2 +- .../datadog-plugin-http/test/client.spec.js | 33 ------------------- .../test/integration-test/server.mjs | 4 +-- .../test/integration-test/server.mjs | 4 +-- 5 files changed, 5 insertions(+), 54 deletions(-) diff --git a/packages/datadog-instrumentations/src/http/client.js b/packages/datadog-instrumentations/src/http/client.js index 33ac14e4376..fcf5cc05f0a 100644 --- a/packages/datadog-instrumentations/src/http/client.js +++ b/packages/datadog-instrumentations/src/http/client.js @@ -69,29 +69,17 @@ function patch (http, methodName) { try { const req = request.call(this, options, callback) const emit = req.emit - - const requestSetTimeout = req.setTimeout + const setTimeout = req.setTimeout ctx.req = req // tracked to accurately discern custom request socket timeout let customRequestTimeout = false - req.setTimeout = function () { customRequestTimeout = true - return requestSetTimeout.apply(this, arguments) + return setTimeout.apply(this, arguments) } - req.on('socket', socket => { - if (socket) { - const socketSetTimeout = socket.setTimeout - socket.setTimeout = function () { - customRequestTimeout = true - return socketSetTimeout.apply(this, arguments) - } - } - }) - req.emit = function (eventName, arg) { switch (eventName) { case 'response': { diff --git a/packages/datadog-plugin-http/src/client.js b/packages/datadog-plugin-http/src/client.js index 65081dee34e..42833bb896f 100644 --- a/packages/datadog-plugin-http/src/client.js +++ b/packages/datadog-plugin-http/src/client.js @@ -121,7 +121,7 @@ class HttpClientPlugin extends ClientPlugin { } else { // conditions for no error: // 1. not using a custom agent instance with custom timeout specified - // 2. no invocation of `req.setTimeout` or `socket.setTimeout` + // 2. 
no invocation of `req.setTimeout` if (!args.options.agent?.options.timeout && !customRequestTimeout) return span.setTag('error', 1) diff --git a/packages/datadog-plugin-http/test/client.spec.js b/packages/datadog-plugin-http/test/client.spec.js index 3c50a19f228..7256950ac83 100644 --- a/packages/datadog-plugin-http/test/client.spec.js +++ b/packages/datadog-plugin-http/test/client.spec.js @@ -900,39 +900,6 @@ describe('Plugin', () => { }) }) }).timeout(10000) - - it('should record error if req.socket.setTimeout is used with Node 20', done => { - const app = express() - - app.get('/user', async (req, res) => { - await new Promise(resolve => { - setTimeout(resolve, 6 * 1000) - }) - res.status(200).send() - }) - - getPort().then(port => { - agent - .use(traces => { - expect(traces[0][0]).to.have.property('error', 1) - }) - .then(done) - .catch(done) - - appListener = server(app, port, async () => { - const req = http.request(`${protocol}://localhost:${port}/user`, res => { - res.on('data', () => { }) - }) - - req.on('error', () => {}) - req.on('socket', socket => { - socket.setTimeout(5000)// match default timeout - }) - - req.end() - }) - }) - }).timeout(10000) } it('should only record a request once', done => { diff --git a/packages/datadog-plugin-http/test/integration-test/server.mjs b/packages/datadog-plugin-http/test/integration-test/server.mjs index 01f59e02003..762cb7e9c84 100644 --- a/packages/datadog-plugin-http/test/integration-test/server.mjs +++ b/packages/datadog-plugin-http/test/integration-test/server.mjs @@ -10,7 +10,5 @@ const server = http.createServer(async (req, res) => { } }).listen(0, () => { const port = server.address().port - if (process.send) { - process.send({ port }) - } + process.send({ port }) }) diff --git a/packages/datadog-plugin-http2/test/integration-test/server.mjs b/packages/datadog-plugin-http2/test/integration-test/server.mjs index d3a13bcc780..861e64dc7bd 100644 --- a/packages/datadog-plugin-http2/test/integration-test/server.mjs +++ b/packages/datadog-plugin-http2/test/integration-test/server.mjs @@ -7,7 +7,5 @@ const server = http2.createServer((req, res) => { server.listen(0, () => { const port = server.address().port - if (process.send) { - process.send({ port }) - } + process.send({ port }) }) From 0163883cb62e7ab53d70d7564eb2bef303cbc91a Mon Sep 17 00:00:00 2001 From: Thomas Hunter II Date: Wed, 20 Dec 2023 11:33:16 -0800 Subject: [PATCH 130/147] github: suggest new issues use helpdesk instead (#3894) * github: suggest new issues use helpdesk instead * remove deprecated bug report / feature request options --- .github/ISSUE_TEMPLATE/bug_report.md | 34 ----------------------- .github/ISSUE_TEMPLATE/config.yml | 9 ++++++ .github/ISSUE_TEMPLATE/feature_request.md | 8 ------ 3 files changed, 9 insertions(+), 42 deletions(-) delete mode 100644 .github/ISSUE_TEMPLATE/bug_report.md create mode 100644 .github/ISSUE_TEMPLATE/config.yml delete mode 100644 .github/ISSUE_TEMPLATE/feature_request.md diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index b4638274d1e..00000000000 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,34 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve -title: '' -labels: bug -assignees: '' - ---- - - -**Expected behaviour** - - -**Actual behaviour** - - -**Steps to reproduce** - - -**Environment** - -* **Operation system:** -* **Node.js version:** -* **Tracer version:** -* **Agent version:** -* **Relevant library versions:** - - 
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 00000000000..b5a5eb1d199 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,9 @@ +blank_issues_enabled: true +contact_links: + - name: Bug Report + url: https://help.datadoghq.com/hc/en-us/requests/new?tf_1260824651490=pt_product_type:apm&tf_1900004146284=pt_apm_language:node + about: This option creates an expedited Bug Report via the helpdesk (no login required). This will allow us to look up your account and allows you to provide additional information in private. Please do not create a GitHub issue to report a bug. + - name: Feature Request + url: https://help.datadoghq.com/hc/en-us/requests/new?tf_1260824651490=pt_product_type:apm&tf_1900004146284=pt_apm_language:node&tf_1260825272270=pt_apm_category_feature_request + about: This option creates an expedited Feature Request via the helpdesk (no login required). This helps with prioritization and allows you to provide additional information in private. Please do not create a GitHub issue to request a feature. + diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index 47b34bf567b..00000000000 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,8 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for this project -title: '' -labels: feature-request -assignees: '' - ---- From e3bb52c0222f52ad562137c3e391a2794a3b35ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jonathan=20Morales=20V=C3=A9lez?= Date: Wed, 20 Dec 2023 23:05:32 +0100 Subject: [PATCH 131/147] docs: fix typos and improve grammar. (#3806) --- docs/API.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/API.md b/docs/API.md index 5902b284e60..afd8749aab9 100644 --- a/docs/API.md +++ b/docs/API.md @@ -8,7 +8,7 @@ The module exported by this library is an instance of the [Tracer](./interfaces/

Automatic Instrumentation

-APM provides out-of-the-box instrumentation for many popular frameworks and libraries by using a plugin system. By default all built-in plugins are enabled. Disabling plugins can cause unexpected side effects, so it is highly recommended to leave them enabled. +APM provides out-of-the-box instrumentation for many popular frameworks and libraries by using a plugin system. By default, all built-in plugins are enabled. Disabling plugins can cause unexpected side effects, so it is highly recommended to leave them enabled. Built-in plugins can be configured individually: @@ -192,7 +192,7 @@ Errors passed to the callback will automatically be added to the span.

Promise

-For promises, the span will be finished afer the promise has been either resolved or rejected. +For promises, the span will be finished after the promise has been either resolved or rejected. ```javascript function handle () { @@ -260,7 +260,7 @@ This method returns the active span from the current scope.

scope.activate(span, fn)

This method activates the provided span in a new scope available in the -provided function. Any asynchronous context created from whithin that function +provided function. Any asynchronous context created from within that function will also have the same scope. ```javascript @@ -342,7 +342,7 @@ const opentracing = require('opentracing') opentracing.initGlobalTracer(tracer) ``` -The following tags are available to override Datadog specific options: +The following tags are available to override Datadog-specific options: * `service.name`: The service name to be used for this span. The service name from the tracer will be used if this is not provided. * `resource.name`: The resource name to be used for this span. The operation name will be used if this is not provided. @@ -359,7 +359,7 @@ const tracerProvider = new tracer.TracerProvider() tracerProvider.register() ``` -The following attributes are available to override Datadog specific options: +The following attributes are available to override Datadog-specific options: * `service.name`: The service name to be used for this span. The service name from the tracer will be used if this is not provided. * `resource.name`: The resource name to be used for this span. The operation name will be used if this is not provided. From a5199423fa3c60fd9964895c9f6000e31dbd9aeb Mon Sep 17 00:00:00 2001 From: Julio Gonzalez <107922352+hoolioh@users.noreply.github.com> Date: Thu, 21 Dec 2023 11:12:57 +0100 Subject: [PATCH 132/147] Pass resolver address as ephemeral type (#3897) * Update waf bindings to 6.0.0. * Pass graphql.server.resolver as ephemeral address type. * Add test. --- package.json | 2 +- packages/dd-trace/src/appsec/graphql.js | 2 +- packages/dd-trace/src/appsec/index.js | 32 ++++--- .../dd-trace/src/appsec/sdk/user_blocking.js | 2 +- .../src/appsec/waf/waf_context_wrapper.js | 38 +++++--- packages/dd-trace/test/appsec/graphql.spec.js | 30 +++--- packages/dd-trace/test/appsec/index.spec.js | 92 ++++++++++++------- .../test/appsec/sdk/user_blocking.spec.js | 4 +- .../dd-trace/test/appsec/waf/index.spec.js | 44 ++++++--- .../appsec/waf/waf_context_wrapper.spec.js | 33 ++++++- yarn.lock | 8 +- 11 files changed, 189 insertions(+), 98 deletions(-) diff --git a/package.json b/package.json index 75beb5dc68b..a54eab1239b 100644 --- a/package.json +++ b/package.json @@ -68,7 +68,7 @@ "node": ">=16" }, "dependencies": { - "@datadog/native-appsec": "5.0.0", + "@datadog/native-appsec": "6.0.0", "@datadog/native-iast-rewriter": "2.2.2", "@datadog/native-iast-taint-tracking": "1.6.4", "@datadog/native-metrics": "^2.0.0", diff --git a/packages/dd-trace/src/appsec/graphql.js b/packages/dd-trace/src/appsec/graphql.js index cf819e4382f..52a17acdf3a 100644 --- a/packages/dd-trace/src/appsec/graphql.js +++ b/packages/dd-trace/src/appsec/graphql.js @@ -31,7 +31,7 @@ function onGraphqlStartResolve ({ context, resolverInfo }) { if (!resolverInfo || typeof resolverInfo !== 'object') return - const actions = waf.run({ [addresses.HTTP_INCOMING_GRAPHQL_RESOLVER]: resolverInfo }, req) + const actions = waf.run({ ephemeral: { [addresses.HTTP_INCOMING_GRAPHQL_RESOLVER]: resolverInfo } }, req) if (actions?.includes('block')) { const requestData = graphqlRequestData.get(req) if (requestData?.isInGraphqlRequest) { diff --git a/packages/dd-trace/src/appsec/index.js b/packages/dd-trace/src/appsec/index.js index deb6df86e3e..90ac6d1f5f3 100644 --- a/packages/dd-trace/src/appsec/index.js +++ b/packages/dd-trace/src/appsec/index.js @@ -83,21 +83,21 @@ function 
incomingHttpStartTranslator ({ req, res, abortController }) { const requestHeaders = Object.assign({}, req.headers) delete requestHeaders.cookie - const payload = { + const persistent = { [addresses.HTTP_INCOMING_URL]: req.url, [addresses.HTTP_INCOMING_HEADERS]: requestHeaders, [addresses.HTTP_INCOMING_METHOD]: req.method } if (clientIp) { - payload[addresses.HTTP_CLIENT_IP] = clientIp + persistent[addresses.HTTP_CLIENT_IP] = clientIp } if (apiSecuritySampler.sampleRequest()) { - payload[addresses.WAF_CONTEXT_PROCESSOR] = { 'extract-schema': true } + persistent[addresses.WAF_CONTEXT_PROCESSOR] = { 'extract-schema': true } } - const actions = waf.run(payload, req) + const actions = waf.run({ persistent }, req) handleResults(actions, req, res, rootSpan, abortController) } @@ -107,7 +107,7 @@ function incomingHttpEndTranslator ({ req, res }) { const responseHeaders = Object.assign({}, res.getHeaders()) delete responseHeaders['set-cookie'] - const payload = { + const persistent = { [addresses.HTTP_INCOMING_RESPONSE_CODE]: '' + res.statusCode, [addresses.HTTP_INCOMING_RESPONSE_HEADERS]: responseHeaders } @@ -115,24 +115,24 @@ function incomingHttpEndTranslator ({ req, res }) { // we need to keep this to support other body parsers // TODO: no need to analyze it if it was already done by the body-parser hook if (req.body !== undefined && req.body !== null) { - payload[addresses.HTTP_INCOMING_BODY] = req.body + persistent[addresses.HTTP_INCOMING_BODY] = req.body } // TODO: temporary express instrumentation, will use express plugin later if (req.params && typeof req.params === 'object') { - payload[addresses.HTTP_INCOMING_PARAMS] = req.params + persistent[addresses.HTTP_INCOMING_PARAMS] = req.params } // we need to keep this to support other cookie parsers if (req.cookies && typeof req.cookies === 'object') { - payload[addresses.HTTP_INCOMING_COOKIES] = req.cookies + persistent[addresses.HTTP_INCOMING_COOKIES] = req.cookies } if (req.query && typeof req.query === 'object') { - payload[addresses.HTTP_INCOMING_QUERY] = req.query + persistent[addresses.HTTP_INCOMING_QUERY] = req.query } - waf.run(payload, req) + waf.run({ persistent }, req) waf.disposeContext(req) @@ -151,7 +151,9 @@ function onRequestBodyParsed ({ req, res, body, abortController }) { if (!rootSpan) return const results = waf.run({ - [addresses.HTTP_INCOMING_BODY]: body + persistent: { + [addresses.HTTP_INCOMING_BODY]: body + } }, req) handleResults(results, req, res, rootSpan, abortController) @@ -169,7 +171,9 @@ function onRequestQueryParsed ({ req, res, query, abortController }) { if (!rootSpan) return const results = waf.run({ - [addresses.HTTP_INCOMING_QUERY]: query + persistent: { + [addresses.HTTP_INCOMING_QUERY]: query + } }, req) handleResults(results, req, res, rootSpan, abortController) @@ -182,7 +186,9 @@ function onRequestCookieParser ({ req, res, abortController, cookies }) { if (!rootSpan) return const results = waf.run({ - [addresses.HTTP_INCOMING_COOKIES]: cookies + persistent: { + [addresses.HTTP_INCOMING_COOKIES]: cookies + } }, req) handleResults(results, req, res, rootSpan, abortController) diff --git a/packages/dd-trace/src/appsec/sdk/user_blocking.js b/packages/dd-trace/src/appsec/sdk/user_blocking.js index e36686884ed..0c385b8202d 100644 --- a/packages/dd-trace/src/appsec/sdk/user_blocking.js +++ b/packages/dd-trace/src/appsec/sdk/user_blocking.js @@ -9,7 +9,7 @@ const { setUserTags } = require('./set_user') const log = require('../../log') function isUserBlocked (user) { - const actions = waf.run({ [USER_ID]: 
user.id }) + const actions = waf.run({ persistent: { [USER_ID]: user.id } }) if (!actions) return false diff --git a/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js b/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js index 83ab3dcc1cd..de62d74ccfa 100644 --- a/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js +++ b/packages/dd-trace/src/appsec/waf/waf_context_wrapper.js @@ -18,30 +18,42 @@ class WAFContextWrapper { this.addressesToSkip = new Set() } - run (params) { + run ({ persistent, ephemeral }) { + const payload = {} + let payloadHasData = false const inputs = {} - let someInputAdded = false const newAddressesToSkip = new Set(this.addressesToSkip) - // TODO: possible optimizaion: only send params that haven't already been sent with same value to this wafContext - for (const key of Object.keys(params)) { - // TODO: requiredAddresses is no longer used due to processor addresses are not included in the list. Check on - // future versions when the actual addresses are included in the 'loaded' section inside diagnostics. - if (!this.addressesToSkip.has(key)) { - inputs[key] = params[key] - if (preventDuplicateAddresses.has(key)) { - newAddressesToSkip.add(key) + if (persistent && typeof persistent === 'object') { + // TODO: possible optimization: only send params that haven't already been sent with same value to this wafContext + for (const key of Object.keys(persistent)) { + // TODO: requiredAddresses is no longer used due to processor addresses are not included in the list. Check on + // future versions when the actual addresses are included in the 'loaded' section inside diagnostics. + if (!this.addressesToSkip.has(key)) { + inputs[key] = persistent[key] + if (preventDuplicateAddresses.has(key)) { + newAddressesToSkip.add(key) + } } - someInputAdded = true } } - if (!someInputAdded) return + if (Object.keys(inputs).length) { + payload['persistent'] = inputs + payloadHasData = true + } + + if (ephemeral && Object.keys(ephemeral).length) { + payload['ephemeral'] = ephemeral + payloadHasData = true + } + + if (!payloadHasData) return try { const start = process.hrtime.bigint() - const result = this.ddwafContext.run(inputs, this.wafTimeout) + const result = this.ddwafContext.run(payload, this.wafTimeout) const end = process.hrtime.bigint() diff --git a/packages/dd-trace/test/appsec/graphql.spec.js b/packages/dd-trace/test/appsec/graphql.spec.js index d0a459d4729..e34e8541780 100644 --- a/packages/dd-trace/test/appsec/graphql.spec.js +++ b/packages/dd-trace/test/appsec/graphql.spec.js @@ -147,12 +147,11 @@ describe('GraphQL', () => { startGraphqlResolve.publish({ context, resolverInfo }) - expect(waf.run).to.have.been.calledOnceWithExactly( - { + expect(waf.run).to.have.been.calledOnceWithExactly({ + ephemeral: { [addresses.HTTP_INCOMING_GRAPHQL_RESOLVER]: resolverInfo - }, - {} - ) + } + }, {}) }) }) @@ -190,12 +189,12 @@ describe('GraphQL', () => { startGraphqlResolve.publish({ context, resolverInfo }) - expect(waf.run).to.have.been.calledOnceWithExactly( - { + expect(waf.run).to.have.been.calledOnceWithExactly({ + ephemeral: { [addresses.HTTP_INCOMING_GRAPHQL_RESOLVER]: resolverInfo - }, - {} - ) + } + }, {}) + expect(context.abortController.abort).not.to.have.been.called apolloChannel.asyncEnd.publish({ abortController }) @@ -221,13 +220,14 @@ describe('GraphQL', () => { startGraphqlResolve.publish({ context, resolverInfo }) - expect(waf.run).to.have.been.calledOnceWithExactly( - { + expect(waf.run).to.have.been.calledOnceWithExactly({ + ephemeral: { 
[addresses.HTTP_INCOMING_GRAPHQL_RESOLVER]: resolverInfo - }, - {} - ) + } + }, {}) + expect(context.abortController.abort).to.have.been.called + const abortData = {} apolloChannel.asyncEnd.publish({ abortController, abortData }) diff --git a/packages/dd-trace/test/appsec/index.spec.js b/packages/dd-trace/test/appsec/index.spec.js index dea33e01faf..82bb57059df 100644 --- a/packages/dd-trace/test/appsec/index.spec.js +++ b/packages/dd-trace/test/appsec/index.spec.js @@ -257,10 +257,12 @@ describe('AppSec Index', () => { 'http.client_ip': '127.0.0.1' }) expect(waf.run).to.have.been.calledOnceWithExactly({ - 'server.request.uri.raw': '/path', - 'server.request.headers.no_cookies': { 'user-agent': 'Arachni', host: 'localhost' }, - 'server.request.method': 'POST', - 'http.client_ip': '127.0.0.1' + persistent: { + 'server.request.uri.raw': '/path', + 'server.request.headers.no_cookies': { 'user-agent': 'Arachni', host: 'localhost' }, + 'server.request.method': 'POST', + 'http.client_ip': '127.0.0.1' + } }, req) }) }) @@ -307,8 +309,10 @@ describe('AppSec Index', () => { AppSec.incomingHttpEndTranslator({ req, res }) expect(waf.run).to.have.been.calledOnceWithExactly({ - 'server.response.status': '201', - 'server.response.headers.no_cookies': { 'content-type': 'application/json', 'content-lenght': 42 } + persistent: { + 'server.response.status': '201', + 'server.response.headers.no_cookies': { 'content-type': 'application/json', 'content-lenght': 42 } + } }, req) expect(Reporter.finishRequest).to.have.been.calledOnceWithExactly(req, res) @@ -348,8 +352,10 @@ describe('AppSec Index', () => { AppSec.incomingHttpEndTranslator({ req, res }) expect(waf.run).to.have.been.calledOnceWithExactly({ - 'server.response.status': '201', - 'server.response.headers.no_cookies': { 'content-type': 'application/json', 'content-lenght': 42 } + persistent: { + 'server.response.status': '201', + 'server.response.headers.no_cookies': { 'content-type': 'application/json', 'content-lenght': 42 } + } }, req) expect(Reporter.finishRequest).to.have.been.calledOnceWithExactly(req, res) @@ -399,12 +405,14 @@ describe('AppSec Index', () => { AppSec.incomingHttpEndTranslator({ req, res }) expect(waf.run).to.have.been.calledOnceWithExactly({ - 'server.response.status': '201', - 'server.response.headers.no_cookies': { 'content-type': 'application/json', 'content-lenght': 42 }, - 'server.request.body': { a: '1' }, - 'server.request.path_params': { c: '3' }, - 'server.request.cookies': { d: '4', e: '5' }, - 'server.request.query': { b: '2' } + persistent: { + 'server.response.status': '201', + 'server.response.headers.no_cookies': { 'content-type': 'application/json', 'content-lenght': 42 }, + 'server.request.body': { a: '1' }, + 'server.request.path_params': { c: '3' }, + 'server.request.cookies': { d: '4', e: '5' }, + 'server.request.query': { b: '2' } + } }, req) expect(Reporter.finishRequest).to.have.been.calledOnceWithExactly(req, res) }) @@ -447,10 +455,12 @@ describe('AppSec Index', () => { AppSec.incomingHttpStartTranslator({ req, res }) expect(waf.run).to.have.been.calledOnceWithExactly({ - 'server.request.uri.raw': '/path', - 'server.request.headers.no_cookies': { 'user-agent': 'Arachni', host: 'localhost' }, - 'server.request.method': 'POST', - 'http.client_ip': '127.0.0.1' + persistent: { + 'server.request.uri.raw': '/path', + 'server.request.headers.no_cookies': { 'user-agent': 'Arachni', host: 'localhost' }, + 'server.request.method': 'POST', + 'http.client_ip': '127.0.0.1' + } }, req) }) @@ -480,10 +490,12 @@ 
describe('AppSec Index', () => { AppSec.incomingHttpStartTranslator({ req, res }) expect(waf.run).to.have.been.calledOnceWithExactly({ - 'server.request.uri.raw': '/path', - 'server.request.headers.no_cookies': { 'user-agent': 'Arachni', host: 'localhost' }, - 'server.request.method': 'POST', - 'http.client_ip': '127.0.0.1' + persistent: { + 'server.request.uri.raw': '/path', + 'server.request.headers.no_cookies': { 'user-agent': 'Arachni', host: 'localhost' }, + 'server.request.method': 'POST', + 'http.client_ip': '127.0.0.1' + } }, req) }) @@ -513,11 +525,13 @@ describe('AppSec Index', () => { AppSec.incomingHttpStartTranslator({ req, res }) expect(waf.run).to.have.been.calledOnceWithExactly({ - 'server.request.uri.raw': '/path', - 'server.request.headers.no_cookies': { 'user-agent': 'Arachni', host: 'localhost' }, - 'server.request.method': 'POST', - 'http.client_ip': '127.0.0.1', - 'waf.context.processor': { 'extract-schema': true } + persistent: { + 'server.request.uri.raw': '/path', + 'server.request.headers.no_cookies': { 'user-agent': 'Arachni', host: 'localhost' }, + 'server.request.method': 'POST', + 'http.client_ip': '127.0.0.1', + 'waf.context.processor': { 'extract-schema': true } + } }, req) }) }) @@ -582,7 +596,9 @@ describe('AppSec Index', () => { bodyParser.publish({ req, res, body, abortController }) expect(waf.run).to.have.been.calledOnceWith({ - 'server.request.body': { key: 'value' } + persistent: { + 'server.request.body': { key: 'value' } + } }) expect(abortController.abort).not.to.have.been.called expect(res.end).not.to.have.been.called @@ -596,7 +612,9 @@ describe('AppSec Index', () => { bodyParser.publish({ req, res, body, abortController }) expect(waf.run).to.have.been.calledOnceWith({ - 'server.request.body': { key: 'value' } + persistent: { + 'server.request.body': { key: 'value' } + } }) expect(abortController.abort).to.have.been.called expect(res.end).to.have.been.called @@ -621,7 +639,9 @@ describe('AppSec Index', () => { cookieParser.publish({ req, res, abortController, cookies }) expect(waf.run).to.have.been.calledOnceWith({ - 'server.request.cookies': { key: 'value' } + persistent: { + 'server.request.cookies': { key: 'value' } + } }) expect(abortController.abort).not.to.have.been.called expect(res.end).not.to.have.been.called @@ -634,7 +654,9 @@ describe('AppSec Index', () => { cookieParser.publish({ req, res, abortController, cookies }) expect(waf.run).to.have.been.calledOnceWith({ - 'server.request.cookies': { key: 'value' } + persistent: { + 'server.request.cookies': { key: 'value' } + } }) expect(abortController.abort).to.have.been.called expect(res.end).to.have.been.called @@ -660,7 +682,9 @@ describe('AppSec Index', () => { queryParser.publish({ req, res, query, abortController }) expect(waf.run).to.have.been.calledOnceWith({ - 'server.request.query': { key: 'value' } + persistent: { + 'server.request.query': { key: 'value' } + } }) expect(abortController.abort).not.to.have.been.called expect(res.end).not.to.have.been.called @@ -674,7 +698,9 @@ describe('AppSec Index', () => { queryParser.publish({ req, res, query, abortController }) expect(waf.run).to.have.been.calledOnceWith({ - 'server.request.query': { key: 'value' } + persistent: { + 'server.request.query': { key: 'value' } + } }) expect(abortController.abort).to.have.been.called expect(res.end).to.have.been.called diff --git a/packages/dd-trace/test/appsec/sdk/user_blocking.spec.js b/packages/dd-trace/test/appsec/sdk/user_blocking.spec.js index 64579a94eee..3072b57122b 100644 --- 
a/packages/dd-trace/test/appsec/sdk/user_blocking.spec.js +++ b/packages/dd-trace/test/appsec/sdk/user_blocking.spec.js @@ -21,8 +21,8 @@ describe('user_blocking', () => { before(() => { const runStub = sinon.stub(waf, 'run') - runStub.withArgs({ [USER_ID]: 'user' }).returns(['block']) - runStub.withArgs({ [USER_ID]: 'gooduser' }).returns(['']) + runStub.withArgs({ persistent: { [USER_ID]: 'user' } }).returns(['block']) + runStub.withArgs({ persistent: { [USER_ID]: 'gooduser' } }).returns(['']) }) beforeEach(() => { diff --git a/packages/dd-trace/test/appsec/waf/index.spec.js b/packages/dd-trace/test/appsec/waf/index.spec.js index 0c01a8ad788..bcdfcf5cb79 100644 --- a/packages/dd-trace/test/appsec/waf/index.spec.js +++ b/packages/dd-trace/test/appsec/waf/index.spec.js @@ -214,15 +214,19 @@ describe('WAF Manager', () => { ddwafContext.run.returns({ totalRuntime: 1, durationExt: 1 }) wafContextWrapper.run({ - 'server.request.headers.no_cookies': { 'header': 'value' }, - 'server.request.uri.raw': 'https://testurl', - 'processor.address': { 'extract-schema': true } + persistent: { + 'server.request.headers.no_cookies': { 'header': 'value' }, + 'server.request.uri.raw': 'https://testurl', + 'processor.address': { 'extract-schema': true } + } }) expect(ddwafContext.run).to.be.calledOnceWithExactly({ - 'server.request.headers.no_cookies': { 'header': 'value' }, - 'server.request.uri.raw': 'https://testurl', - 'processor.address': { 'extract-schema': true } + persistent: { + 'server.request.headers.no_cookies': { 'header': 'value' }, + 'server.request.uri.raw': 'https://testurl', + 'processor.address': { 'extract-schema': true } + } }, config.appsec.wafTimeout) }) @@ -235,7 +239,9 @@ describe('WAF Manager', () => { ddwafContext.run.returns(result) const params = { - 'server.request.headers.no_cookies': { 'header': 'value' } + persistent: { + 'server.request.headers.no_cookies': { 'header': 'value' } + } } wafContextWrapper.run(params) @@ -252,7 +258,9 @@ describe('WAF Manager', () => { ddwafContext.run.returns(result) const params = { - 'server.request.headers.no_cookies': { 'header': 'value' } + persistent: { + 'server.request.headers.no_cookies': { 'header': 'value' } + } } wafContextWrapper.run(params) @@ -266,7 +274,9 @@ describe('WAF Manager', () => { it('should not report attack when ddwafContext does not return events', () => { ddwafContext.run.returns({ totalRuntime: 1, durationExt: 1 }) const params = { - 'server.request.headers.no_cookies': { 'header': 'value' } + persistent: { + 'server.request.headers.no_cookies': { 'header': 'value' } + } } wafContextWrapper.run(params) @@ -277,7 +287,9 @@ describe('WAF Manager', () => { it('should not report attack when ddwafContext returns empty data', () => { ddwafContext.run.returns({ totalRuntime: 1, durationExt: 1, events: [] }) const params = { - 'server.request.headers.no_cookies': { 'header': 'value' } + persistent: { + 'server.request.headers.no_cookies': { 'header': 'value' } + } } wafContextWrapper.run(params) @@ -290,7 +302,9 @@ describe('WAF Manager', () => { ddwafContext.run.returns({ totalRuntime: 1, durationExt: 1, events: [], actions: actions }) const params = { - 'server.request.headers.no_cookies': { 'header': 'value' } + persistent: { + 'server.request.headers.no_cookies': { 'header': 'value' } + } } const result = wafContextWrapper.run(params) @@ -305,9 +319,11 @@ describe('WAF Manager', () => { derivatives: [{ '_dd.appsec.s.req.body': [8] }] } const params = { - 'server.request.body': 'value', - 'waf.context.processor': { - 
'extract-schema': true + persistent: { + 'server.request.body': 'value', + 'waf.context.processor': { + 'extract-schema': true + } } } diff --git a/packages/dd-trace/test/appsec/waf/waf_context_wrapper.spec.js b/packages/dd-trace/test/appsec/waf/waf_context_wrapper.spec.js index 23df1adfdc5..b422c5eeda6 100644 --- a/packages/dd-trace/test/appsec/waf/waf_context_wrapper.spec.js +++ b/packages/dd-trace/test/appsec/waf/waf_context_wrapper.spec.js @@ -11,7 +11,9 @@ describe('WAFContextWrapper', () => { const wafContextWrapper = new WAFContextWrapper(ddwafContext, 1000, '1.14.0', '1.8.0') const payload = { - [addresses.HTTP_INCOMING_QUERY]: { key: 'value' } + persistent: { + [addresses.HTTP_INCOMING_QUERY]: { key: 'value' } + } } wafContextWrapper.run(payload) @@ -19,4 +21,33 @@ describe('WAFContextWrapper', () => { expect(ddwafContext.run).to.have.been.calledOnceWithExactly(payload, 1000) }) + + it('Should send ephemeral addreses every time', () => { + const ddwafContext = { + run: sinon.stub() + } + const wafContextWrapper = new WAFContextWrapper(ddwafContext, 1000, '1.14.0', '1.8.0') + + const payload = { + persistent: { + [addresses.HTTP_INCOMING_QUERY]: { key: 'value' } + }, + ephemeral: { + [addresses.HTTP_INCOMING_GRAPHQL_RESOLVER]: { anotherKey: 'anotherValue' } + } + } + + wafContextWrapper.run(payload) + wafContextWrapper.run(payload) + + expect(ddwafContext.run).to.have.been.calledTwice + expect(ddwafContext.run.firstCall).to.have.been.calledWithExactly(payload, 1000) + expect(ddwafContext.run.secondCall).to.have.been.calledWithExactly({ + ephemeral: { + [addresses.HTTP_INCOMING_GRAPHQL_RESOLVER]: { + anotherKey: 'anotherValue' + } + } + }, 1000) + }) }) diff --git a/yarn.lock b/yarn.lock index bd8743d5f0b..9ecfe4833eb 100644 --- a/yarn.lock +++ b/yarn.lock @@ -412,10 +412,10 @@ resolved "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz" integrity "sha1-u1BFecHK6SPmV2pPXaQ9Jfl729k= sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==" -"@datadog/native-appsec@5.0.0": - version "5.0.0" - resolved "https://registry.npmjs.org/@datadog/native-appsec/-/native-appsec-5.0.0.tgz" - integrity "sha1-5C539CBiUyrX3vo6eQkNyLAgwis= sha512-Ks8a4L49N40w+TJjj2e9ncGssUIEjo4wnmUFjPBRvlLGuVj1VJLxCx7ztpd8eTycM5QQlzggCDOP6CMEVmeZbA==" +"@datadog/native-appsec@6.0.0": + version "6.0.0" + resolved "https://registry.yarnpkg.com/@datadog/native-appsec/-/native-appsec-6.0.0.tgz#da753f8566ec5180ad9e83014cb44984b4bc878e" + integrity sha512-e7vH5usFoqov7FraPcA99fe80t2/qm4Cmno1T3iBhYlhyO6HD01ArDsCZ/sUvNIUR1ujxtbr8Z9WRGJ0qQ/FDA== dependencies: node-gyp-build "^3.9.0" From 8f4740152f728ef8d38fe435cb63d672582ef9e6 Mon Sep 17 00:00:00 2001 From: Roch Devost Date: Thu, 21 Dec 2023 14:58:37 -0500 Subject: [PATCH 133/147] add remote config support for custom tags (#3875) --- .../src/appsec/remote_config/capabilities.js | 6 +- .../src/appsec/remote_config/index.js | 4 + packages/dd-trace/src/config.js | 138 ++++++++++-------- packages/dd-trace/src/opentracing/tracer.js | 4 +- packages/dd-trace/src/telemetry/index.js | 29 +++- .../test/appsec/remote_config/index.spec.js | 117 +++++++-------- packages/dd-trace/test/config.spec.js | 2 +- 7 files changed, 169 insertions(+), 131 deletions(-) diff --git a/packages/dd-trace/src/appsec/remote_config/capabilities.js b/packages/dd-trace/src/appsec/remote_config/capabilities.js index 6032b6543c2..8d53eb05596 100644 --- a/packages/dd-trace/src/appsec/remote_config/capabilities.js +++ 
b/packages/dd-trace/src/appsec/remote_config/capabilities.js @@ -10,5 +10,9 @@ module.exports = { ASM_CUSTOM_RULES: 1n << 8n, ASM_CUSTOM_BLOCKING_RESPONSE: 1n << 9n, ASM_TRUSTED_IPS: 1n << 10n, - ASM_API_SECURITY_SAMPLE_RATE: 1n << 11n + ASM_API_SECURITY_SAMPLE_RATE: 1n << 11n, + APM_TRACING_SAMPLE_RATE: 1n << 12n, + APM_TRACING_LOGS_INJECTION: 1n << 13n, + APM_TRACING_HTTP_HEADER_TAGS: 1n << 14n, + APM_TRACING_CUSTOM_TAGS: 1n << 15n } diff --git a/packages/dd-trace/src/appsec/remote_config/index.js b/packages/dd-trace/src/appsec/remote_config/index.js index 08d912d22e1..e2b45ff158f 100644 --- a/packages/dd-trace/src/appsec/remote_config/index.js +++ b/packages/dd-trace/src/appsec/remote_config/index.js @@ -10,6 +10,10 @@ let rc function enable (config) { rc = new RemoteConfigManager(config) + rc.updateCapabilities(RemoteConfigCapabilities.APM_TRACING_CUSTOM_TAGS, true) + rc.updateCapabilities(RemoteConfigCapabilities.APM_TRACING_HTTP_HEADER_TAGS, true) + rc.updateCapabilities(RemoteConfigCapabilities.APM_TRACING_LOGS_INJECTION, true) + rc.updateCapabilities(RemoteConfigCapabilities.APM_TRACING_SAMPLE_RATE, true) const activation = Activation.fromConfig(config) diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index fb5615bdf17..40c9c68d091 100644 --- a/packages/dd-trace/src/config.js +++ b/packages/dd-trace/src/config.js @@ -109,13 +109,6 @@ class Config { log.use(this.logger) log.toggle(this.debug, this.logLevel, this) - this.tags = {} - - tagger.add(this.tags, process.env.DD_TAGS) - tagger.add(this.tags, process.env.DD_TRACE_TAGS) - tagger.add(this.tags, process.env.DD_TRACE_GLOBAL_TAGS) - tagger.add(this.tags, options.tags) - const DD_TRACING_ENABLED = coalesce( process.env.DD_TRACING_ENABLED, true @@ -184,33 +177,12 @@ class Config { false ) - const DD_SERVICE = options.service || - process.env.DD_SERVICE || - process.env.DD_SERVICE_NAME || - this.tags.service || - process.env.AWS_LAMBDA_FUNCTION_NAME || - process.env.FUNCTION_NAME || // Google Cloud Function Name set by deprecated runtimes - process.env.K_SERVICE || // Google Cloud Function Name set by newer runtimes - process.env.WEBSITE_SITE_NAME || // set by Azure Functions - pkg.name || - 'node' const DD_SERVICE_MAPPING = coalesce( options.serviceMapping, process.env.DD_SERVICE_MAPPING ? fromEntries( process.env.DD_SERVICE_MAPPING.split(',').map(x => x.trim().split(':')) ) : {} ) - const DD_ENV = coalesce( - options.env, - process.env.DD_ENV, - this.tags.env - ) - const DD_VERSION = coalesce( - options.version, - process.env.DD_VERSION, - this.tags.version, - pkg.version - ) const DD_TRACE_STARTUP_LOGS = coalesce( options.startupLogs, process.env.DD_TRACE_STARTUP_LOGS, @@ -583,7 +555,6 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) this.dsmEnabled = isTrue(DD_DATA_STREAMS_ENABLED) this.openAiLogsEnabled = DD_OPENAI_LOGS_ENABLED this.apiKey = DD_API_KEY - this.env = DD_ENV this.url = DD_CIVISIBILITY_AGENTLESS_URL ? new URL(DD_CIVISIBILITY_AGENTLESS_URL) : getAgentUrl(DD_TRACE_AGENT_URL, options) this.site = coalesce(options.site, process.env.DD_SITE, 'datadoghq.com') @@ -595,9 +566,7 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) 
this.clientIpEnabled = DD_TRACE_CLIENT_IP_ENABLED this.clientIpHeader = DD_TRACE_CLIENT_IP_HEADER this.plugins = !!coalesce(options.plugins, true) - this.service = DD_SERVICE this.serviceMapping = DD_SERVICE_MAPPING - this.version = DD_VERSION this.dogstatsd = { hostname: coalesce(dogstatsd.hostname, process.env.DD_DOGSTATSD_HOSTNAME, this.hostname), port: String(coalesce(dogstatsd.port, process.env.DD_DOGSTATSD_PORT, 8125)) @@ -690,6 +659,31 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) // Requires an accompanying DD_APM_OBFUSCATION_MEMCACHED_KEEP_COMMAND=true in the agent this.memcachedCommandEnabled = isTrue(DD_TRACE_MEMCACHED_COMMAND_ENABLED) + this.stats = { + enabled: isTrue(DD_TRACE_STATS_COMPUTATION_ENABLED) + } + + this.traceId128BitGenerationEnabled = isTrue(DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED) + this.traceId128BitLoggingEnabled = isTrue(DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED) + + this.isGCPFunction = isGCPFunction + this.isAzureFunctionConsumptionPlan = isAzureFunctionConsumptionPlan + + this.spanLeakDebug = Number(DD_TRACE_SPAN_LEAK_DEBUG) + + this._applyDefaults() + this._applyEnvironment() + this._applyOptions(options) + this._applyRemote({}) + this._merge() + + tagger.add(this.tags, { + service: this.service, + env: this.env, + version: this.version, + 'runtime-id': uuid() + }) + if (this.gitMetadataEnabled) { this.repositoryUrl = removeUserSensitiveInfo( coalesce( @@ -722,31 +716,6 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) } } } - - this.stats = { - enabled: isTrue(DD_TRACE_STATS_COMPUTATION_ENABLED) - } - - this.traceId128BitGenerationEnabled = isTrue(DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED) - this.traceId128BitLoggingEnabled = isTrue(DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED) - - this.isGCPFunction = isGCPFunction - this.isAzureFunctionConsumptionPlan = isAzureFunctionConsumptionPlan - - this.spanLeakDebug = Number(DD_TRACE_SPAN_LEAK_DEBUG) - - tagger.add(this.tags, { - service: this.service, - env: this.env, - version: this.version, - 'runtime-id': uuid() - }) - - this._applyDefaults() - this._applyEnvironment() - this._applyOptions(options) - this._applyRemote({}) - this._merge() } // Supports only a subset of options for now. @@ -761,48 +730,93 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) 
} _applyDefaults () { + const { + AWS_LAMBDA_FUNCTION_NAME, + FUNCTION_NAME, + K_SERVICE, + WEBSITE_SITE_NAME + } = process.env + + const service = AWS_LAMBDA_FUNCTION_NAME || + FUNCTION_NAME || // Google Cloud Function Name set by deprecated runtimes + K_SERVICE || // Google Cloud Function Name set by newer runtimes + WEBSITE_SITE_NAME || // set by Azure Functions + pkg.name || + 'node' + const defaults = this._defaults = {} + this._setValue(defaults, 'service', service) + this._setValue(defaults, 'env', undefined) + this._setValue(defaults, 'version', pkg.version) this._setUnit(defaults, 'sampleRate', undefined) this._setBoolean(defaults, 'logInjection', false) this._setArray(defaults, 'headerTags', []) + this._setValue(defaults, 'tags', {}) } _applyEnvironment () { const { - DD_TRACE_SAMPLE_RATE, + DD_ENV, DD_LOGS_INJECTION, - DD_TRACE_HEADER_TAGS + DD_SERVICE, + DD_SERVICE_NAME, + DD_TAGS, + DD_TRACE_GLOBAL_TAGS, + DD_TRACE_HEADER_TAGS, + DD_TRACE_SAMPLE_RATE, + DD_TRACE_TAGS, + DD_VERSION } = process.env + const tags = {} const env = this._env = {} + tagger.add(tags, DD_TAGS) + tagger.add(tags, DD_TRACE_TAGS) + tagger.add(tags, DD_TRACE_GLOBAL_TAGS) + + this._setValue(env, 'service', DD_SERVICE || DD_SERVICE_NAME || tags.service) + this._setValue(env, 'env', DD_ENV || tags.env) + this._setValue(env, 'version', DD_VERSION || tags.version) this._setUnit(env, 'sampleRate', DD_TRACE_SAMPLE_RATE) this._setBoolean(env, 'logInjection', DD_LOGS_INJECTION) this._setArray(env, 'headerTags', DD_TRACE_HEADER_TAGS) + this._setTags(env, 'tags', tags) } _applyOptions (options) { const opts = this._options = this._options || {} + const tags = {} options = Object.assign({ ingestion: {} }, options, opts) + tagger.add(tags, options.tags) + + this._setValue(opts, 'service', options.service || tags.service) + this._setValue(opts, 'env', options.env || tags.env) + this._setValue(opts, 'version', options.version || tags.version) this._setUnit(opts, 'sampleRate', coalesce(options.sampleRate, options.ingestion.sampleRate)) this._setBoolean(opts, 'logInjection', options.logInjection) this._setArray(opts, 'headerTags', options.headerTags) + this._setTags(opts, 'tags', tags) } _applyRemote (options) { const opts = this._remote = this._remote || {} + const tags = {} const headerTags = options.tracing_header_tags ? options.tracing_header_tags.map(tag => { return tag.tag_name ? `${tag.header}:${tag.tag_name}` : tag.header }) : undefined + tagger.add(tags, options.tracing_tags) + this._setUnit(opts, 'sampleRate', options.tracing_sampling_rate) this._setBoolean(opts, 'logInjection', options.log_injection_enabled) this._setArray(opts, 'headerTags', headerTags) + this._setTags(opts, 'tags', tags) } _setBoolean (obj, name, value) { @@ -842,6 +856,14 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) 
} } + _setTags (obj, name, value) { + if (!value || Object.keys(value).length === 0) { + return this._setValue(obj, name, null) + } + + this._setValue(obj, name, value) + } + _setValue (obj, name, value) { obj[name] = value } diff --git a/packages/dd-trace/src/opentracing/tracer.js b/packages/dd-trace/src/opentracing/tracer.js index 2a46d8a8c9a..8b18938631b 100644 --- a/packages/dd-trace/src/opentracing/tracer.js +++ b/packages/dd-trace/src/opentracing/tracer.js @@ -22,10 +22,10 @@ class DatadogTracer { constructor (config) { const Exporter = getExporter(config.experimental.exporter) + this._config = config this._service = config.service this._version = config.version this._env = config.env - this._tags = config.tags this._logInjection = config.logInjection this._debug = config.debug this._prioritySampler = new PrioritySampler(config.env, config.sampler) @@ -64,7 +64,7 @@ class DatadogTracer { integrationName: options.integrationName }, this._debug) - span.addTags(this._tags) + span.addTags(this._config.tags) span.addTags(options.tags) return span diff --git a/packages/dd-trace/src/telemetry/index.js b/packages/dd-trace/src/telemetry/index.js index a99aba775eb..a2db5e13971 100644 --- a/packages/dd-trace/src/telemetry/index.js +++ b/packages/dd-trace/src/telemetry/index.js @@ -286,11 +286,30 @@ function updateConfig (changes, config) { const application = createAppObject(config) const host = createHostObject() - const configuration = changes.map(change => ({ - name: change.name, - value: Array.isArray(change.value) ? change.value.join(',') : change.value, - origin: change.origin - })) + const names = { + sampleRate: 'DD_TRACE_SAMPLE_RATE', + logInjection: 'DD_LOG_INJECTION', + headerTags: 'DD_TRACE_HEADER_TAGS', + tags: 'DD_TAGS' + } + + const configuration = [] + + for (const change of changes) { + if (!names.hasOwnProperty(change.name)) continue + + const name = names[change.name] + const { origin, value } = change + const entry = { name, origin, value } + + if (Array.isArray(value)) { + entry.value = value.join(',') + } else if (name === 'DD_TAGS') { + entry.value = Object.entries(value).map(([key, value]) => `${key}:${value}`) + } + + configuration.push(entry) + } const { reqType, payload } = createPayload('app-client-configuration-change', { configuration }) diff --git a/packages/dd-trace/test/appsec/remote_config/index.spec.js b/packages/dd-trace/test/appsec/remote_config/index.spec.js index 6287c693ddc..9efc142c02e 100644 --- a/packages/dd-trace/test/appsec/remote_config/index.spec.js +++ b/packages/dd-trace/test/appsec/remote_config/index.spec.js @@ -56,8 +56,8 @@ describe('Remote Config index', () => { remoteConfig.enable(config) expect(RemoteConfigManager).to.have.been.calledOnceWithExactly(config) - expect(rc.updateCapabilities).to.have.been.calledOnceWithExactly(RemoteConfigCapabilities.ASM_ACTIVATION, true) - expect(rc.on).to.have.been.calledOnceWith('ASM_FEATURES') + expect(rc.updateCapabilities).to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_ACTIVATION, true) + expect(rc.on).to.have.been.calledWith('ASM_FEATURES') expect(rc.on.firstCall.args[1]).to.be.a('function') }) @@ -67,7 +67,7 @@ describe('Remote Config index', () => { remoteConfig.enable(config) expect(RemoteConfigManager).to.have.been.calledOnceWithExactly(config) - expect(rc.updateCapabilities).to.not.have.been.called + expect(rc.updateCapabilities).to.not.have.been.calledWith('ASM_ACTIVATION') expect(rc.on).to.have.been.calledOnceWith('ASM_FEATURES') expect(rc.on.firstCall.args[1]).to.be.a('function') 
}) @@ -78,7 +78,7 @@ describe('Remote Config index', () => { remoteConfig.enable(config) expect(RemoteConfigManager).to.have.been.calledOnceWithExactly(config) - expect(rc.updateCapabilities).to.not.have.been.called + expect(rc.updateCapabilities).to.not.have.been.calledWith(RemoteConfigCapabilities.ASM_ACTIVATION, true) expect(rc.on).to.not.have.been.called }) @@ -88,10 +88,9 @@ describe('Remote Config index', () => { remoteConfig.enable(config) expect(RemoteConfigManager).to.have.been.calledOnceWithExactly(config) - expect(rc.updateCapabilities).to.have.been.calledTwice - expect(rc.updateCapabilities.firstCall) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_ACTIVATION, true) - expect(rc.updateCapabilities.secondCall) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_API_SECURITY_SAMPLE_RATE, true) }) @@ -101,8 +100,7 @@ describe('Remote Config index', () => { remoteConfig.enable(config) expect(RemoteConfigManager).to.have.been.calledOnceWithExactly(config) - expect(rc.updateCapabilities).to.have.been.calledOnce - expect(rc.updateCapabilities.firstCall) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_API_SECURITY_SAMPLE_RATE, true) }) @@ -259,7 +257,7 @@ describe('Remote Config index', () => { remoteConfig.enable(config) remoteConfig.enableWafUpdate(config.appsec) - expect(rc.updateCapabilities).to.not.have.been.called + expect(rc.updateCapabilities).to.not.have.been.calledWith('ASM_ACTIVATION') expect(rc.on).to.have.been.called }) @@ -268,30 +266,27 @@ describe('Remote Config index', () => { remoteConfig.enable(config) remoteConfig.enableWafUpdate(config.appsec) - expect(rc.updateCapabilities.callCount).to.be.equal(8) - expect(rc.updateCapabilities.getCall(0)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_IP_BLOCKING, true) - expect(rc.updateCapabilities.getCall(1)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_USER_BLOCKING, true) - expect(rc.updateCapabilities.getCall(2)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_DD_RULES, true) - expect(rc.updateCapabilities.getCall(3)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_EXCLUSIONS, true) - expect(rc.updateCapabilities.getCall(4)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_REQUEST_BLOCKING, true) - expect(rc.updateCapabilities.getCall(5)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_RULES, true) - expect(rc.updateCapabilities.getCall(6)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, true) - expect(rc.updateCapabilities.getCall(7)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_TRUSTED_IPS, true) - expect(rc.on.callCount).to.be.equal(5) - expect(rc.on.getCall(0)).to.have.been.calledWith('ASM_FEATURES') - expect(rc.on.getCall(1)).to.have.been.calledWith('ASM_DATA') - expect(rc.on.getCall(2)).to.have.been.calledWith('ASM_DD') - expect(rc.on.getCall(3)).to.have.been.calledWith('ASM') - expect(rc.on.getCall(4)).to.have.been.calledWithExactly(kPreUpdate, RuleManager.updateWafFromRC) + expect(rc.on).to.have.been.calledWith('ASM_DATA') + expect(rc.on).to.have.been.calledWith('ASM_DD') + 
expect(rc.on).to.have.been.calledWith('ASM') + expect(rc.on).to.have.been.calledWithExactly(kPreUpdate, RuleManager.updateWafFromRC) }) it('should activate if appsec is manually enabled', () => { @@ -299,30 +294,27 @@ describe('Remote Config index', () => { remoteConfig.enable(config) remoteConfig.enableWafUpdate(config.appsec) - expect(rc.updateCapabilities.callCount).to.be.equal(8) - expect(rc.updateCapabilities.getCall(0)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_IP_BLOCKING, true) - expect(rc.updateCapabilities.getCall(1)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_USER_BLOCKING, true) - expect(rc.updateCapabilities.getCall(2)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_DD_RULES, true) - expect(rc.updateCapabilities.getCall(3)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_EXCLUSIONS, true) - expect(rc.updateCapabilities.getCall(4)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_REQUEST_BLOCKING, true) - expect(rc.updateCapabilities.getCall(5)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_RULES, true) - expect(rc.updateCapabilities.getCall(6)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, true) - expect(rc.updateCapabilities.getCall(7)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_TRUSTED_IPS, true) - expect(rc.on.callCount).to.be.equal(5) - expect(rc.on.getCall(0)).to.have.been.calledWith('ASM_FEATURES') - expect(rc.on.getCall(1)).to.have.been.calledWith('ASM_DATA') - expect(rc.on.getCall(2)).to.have.been.calledWith('ASM_DD') - expect(rc.on.getCall(3)).to.have.been.calledWith('ASM') - expect(rc.on.getCall(4)).to.have.been.calledWithExactly(kPreUpdate, RuleManager.updateWafFromRC) + expect(rc.on).to.have.been.calledWith('ASM_DATA') + expect(rc.on).to.have.been.calledWith('ASM_DD') + expect(rc.on).to.have.been.calledWith('ASM') + expect(rc.on).to.have.been.calledWithExactly(kPreUpdate, RuleManager.updateWafFromRC) }) it('should activate if appsec enabled is not defined', () => { @@ -330,24 +322,23 @@ describe('Remote Config index', () => { remoteConfig.enable(config) remoteConfig.enableWafUpdate(config.appsec) - expect(rc.updateCapabilities.callCount).to.be.equal(9) - expect(rc.updateCapabilities.getCall(0)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_ACTIVATION, true) - expect(rc.updateCapabilities.getCall(1)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_IP_BLOCKING, true) - expect(rc.updateCapabilities.getCall(2)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_USER_BLOCKING, true) - expect(rc.updateCapabilities.getCall(3)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_DD_RULES, true) - expect(rc.updateCapabilities.getCall(4)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_EXCLUSIONS, true) - expect(rc.updateCapabilities.getCall(5)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_REQUEST_BLOCKING, true) - expect(rc.updateCapabilities.getCall(6)) + expect(rc.updateCapabilities) 
.to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_RULES, true) - expect(rc.updateCapabilities.getCall(7)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, true) - expect(rc.updateCapabilities.getCall(8)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_TRUSTED_IPS, true) }) }) @@ -358,29 +349,27 @@ describe('Remote Config index', () => { rc.updateCapabilities.resetHistory() remoteConfig.disableWafUpdate() - expect(rc.updateCapabilities.callCount).to.be.equal(8) - expect(rc.updateCapabilities.getCall(0)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_IP_BLOCKING, false) - expect(rc.updateCapabilities.getCall(1)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_USER_BLOCKING, false) - expect(rc.updateCapabilities.getCall(2)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_DD_RULES, false) - expect(rc.updateCapabilities.getCall(3)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_EXCLUSIONS, false) - expect(rc.updateCapabilities.getCall(4)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_REQUEST_BLOCKING, false) - expect(rc.updateCapabilities.getCall(5)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_RULES, false) - expect(rc.updateCapabilities.getCall(6)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_CUSTOM_BLOCKING_RESPONSE, false) - expect(rc.updateCapabilities.getCall(7)) + expect(rc.updateCapabilities) .to.have.been.calledWithExactly(RemoteConfigCapabilities.ASM_TRUSTED_IPS, false) - expect(rc.off.callCount).to.be.equal(4) - expect(rc.off.getCall(0)).to.have.been.calledWith('ASM_DATA') - expect(rc.off.getCall(1)).to.have.been.calledWith('ASM_DD') - expect(rc.off.getCall(2)).to.have.been.calledWith('ASM') - expect(rc.off.getCall(3)).to.have.been.calledWithExactly(kPreUpdate, RuleManager.updateWafFromRC) + expect(rc.off).to.have.been.calledWith('ASM_DATA') + expect(rc.off).to.have.been.calledWith('ASM_DD') + expect(rc.off).to.have.been.calledWith('ASM') + expect(rc.off).to.have.been.calledWithExactly(kPreUpdate, RuleManager.updateWafFromRC) }) }) }) diff --git a/packages/dd-trace/test/config.spec.js b/packages/dd-trace/test/config.spec.js index a7647e395f1..21047b5c894 100644 --- a/packages/dd-trace/test/config.spec.js +++ b/packages/dd-trace/test/config.spec.js @@ -763,7 +763,7 @@ describe('Config', () => { expect(config).to.have.property('clientIpHeader', 'x-true-client-ip') expect(config).to.have.property('traceId128BitGenerationEnabled', false) expect(config).to.have.property('traceId128BitLoggingEnabled', false) - expect(config.tags).to.include({ foo: 'foo', baz: 'qux' }) + expect(config.tags).to.include({ foo: 'foo' }) expect(config.tags).to.include({ service: 'test', version: '1.0.0', env: 'development' }) expect(config).to.have.deep.property('serviceMapping', { b: 'bb' }) expect(config).to.have.property('spanAttributeSchema', 'v1') From 0e681721906f7b350046fdd524605be2bbd3f2d6 Mon Sep 17 00:00:00 2001 From: Brian Devins-Suresh Date: Thu, 21 Dec 2023 16:45:24 -0500 Subject: [PATCH 134/147] Update test agent configuration (#3372) * Update test agent configuration * Update test sgent config * Update test agent configuration --------- 
Co-authored-by: William Conti --- .github/workflows/plugins.yml | 8 ++++---- docker-compose.yml | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/plugins.yml b/.github/workflows/plugins.yml index 6ba2e693766..59f2dcd3934 100644 --- a/.github/workflows/plugins.yml +++ b/.github/workflows/plugins.yml @@ -30,11 +30,11 @@ jobs: ports: - 3000:3000 testagent: - image: ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:latest + image: ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:v1.16.0 env: LOG_LEVEL: DEBUG TRACE_LANGUAGE: javascript - DISABLED_CHECKS: trace_content_length + ENABLED_CHECKS: trace_stall,meta_tracer_version_header,trace_count_header,trace_peer_service PORT: 9126 ports: - 9126:9126 @@ -985,11 +985,11 @@ jobs: - 1521:1521 - 5500:5500 testagent: - image: ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:latest + image: ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:v1.16.0 env: LOG_LEVEL: DEBUG TRACE_LANGUAGE: javascript - DISABLED_CHECKS: trace_content_length + ENABLED_CHECKS: trace_stall,meta_tracer_version_header,trace_count_header,trace_peer_service PORT: 9126 ports: - 9126:9126 diff --git a/docker-compose.yml b/docker-compose.yml index 6abe59c677d..226b8ada2af 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -159,11 +159,11 @@ services: - LDAP_PASSWORDS=password1,password2 testagent: - image: ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:v1.13.1 + image: ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:v1.16.0 ports: - "127.0.0.1:9126:9126" environment: - LOG_LEVEL=DEBUG - TRACE_LANGUAGE=javascript - - DISABLED_CHECKS=trace_content_length + - ENABLED_CHECKS=trace_stall,meta_tracer_version_header,trace_count_header,trace_peer_service - PORT=9126 From ffcfe6d2201b62fba1ca119208a57a70a855acb9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Fri, 22 Dec 2023 15:04:19 +0100 Subject: [PATCH 135/147] =?UTF-8?q?[ci-visibility]=C2=A0CI=20Visibility=20?= =?UTF-8?q?telemetry=20-=20agentful=20=20(#3752)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- packages/datadog-instrumentations/src/jest.js | 2 +- packages/datadog-plugin-cucumber/src/index.js | 36 ++++- packages/datadog-plugin-cypress/src/plugin.js | 68 +++++++-- packages/datadog-plugin-jest/src/index.js | 42 +++++- packages/datadog-plugin-mocha/src/index.js | 33 ++++- .../datadog-plugin-playwright/src/index.js | 18 ++- .../exporters/agentless/coverage-writer.js | 31 ++++- .../exporters/agentless/writer.js | 31 ++++- .../exporters/git/git_metadata.js | 40 +++++- .../get-itr-configuration.js | 19 ++- .../get-skippable-suites.js | 27 +++- .../dd-trace/src/ci-visibility/telemetry.js | 130 ++++++++++++++++++ .../src/encode/agentless-ci-visibility.js | 15 +- .../src/encode/coverage-ci-visibility.js | 14 ++ .../exporters/common/agent-info-exporter.js | 4 + .../src/exporters/common/form-data.js | 4 + packages/dd-trace/src/plugins/ci_plugin.js | 52 +++++-- packages/dd-trace/src/plugins/util/exec.js | 25 +++- packages/dd-trace/src/plugins/util/git.js | 113 ++++++++++++--- packages/dd-trace/src/telemetry/index.js | 4 + .../agentless/coverage-writer.spec.js | 9 +- .../exporters/ci-visibility-exporter.spec.js | 2 + .../dd-trace/test/plugins/util/git.spec.js | 63 ++++----- 23 files changed, 688 insertions(+), 94 deletions(-) create mode 100644 packages/dd-trace/src/ci-visibility/telemetry.js diff --git a/packages/datadog-instrumentations/src/jest.js 
b/packages/datadog-instrumentations/src/jest.js index f62f0c9fac9..82935351e7d 100644 --- a/packages/datadog-instrumentations/src/jest.js +++ b/packages/datadog-instrumentations/src/jest.js @@ -403,7 +403,7 @@ function jestAdapterWrapper (jestAdapter, jestVersion) { const coverageFiles = getCoveredFilenamesFromCoverage(environment.global.__coverage__) .map(filename => getTestSuitePath(filename, environment.rootDir)) asyncResource.runInAsyncScope(() => { - testSuiteCodeCoverageCh.publish([...coverageFiles, environment.testSuite]) + testSuiteCodeCoverageCh.publish({ coverageFiles, testSuite: environment.testSuite }) }) } testSuiteFinishCh.publish({ status, errorMessage }) diff --git a/packages/datadog-plugin-cucumber/src/index.js b/packages/datadog-plugin-cucumber/src/index.js index 98fa1b4037c..4f2d29603b7 100644 --- a/packages/datadog-plugin-cucumber/src/index.js +++ b/packages/datadog-plugin-cucumber/src/index.js @@ -12,10 +12,21 @@ const { getTestSuiteCommonTags, addIntelligentTestRunnerSpanTags, TEST_ITR_UNSKIPPABLE, - TEST_ITR_FORCED_RUN + TEST_ITR_FORCED_RUN, + TEST_CODE_OWNERS } = require('../../dd-trace/src/plugins/util/test') const { RESOURCE_NAME } = require('../../../ext/tags') const { COMPONENT, ERROR_MESSAGE } = require('../../dd-trace/src/constants') +const { + TELEMETRY_EVENT_CREATED, + TELEMETRY_EVENT_FINISHED, + TELEMETRY_CODE_COVERAGE_STARTED, + TELEMETRY_CODE_COVERAGE_FINISHED, + TELEMETRY_ITR_FORCED_TO_RUN, + TELEMETRY_CODE_COVERAGE_EMPTY, + TELEMETRY_ITR_UNSKIPPABLE, + TELEMETRY_CODE_COVERAGE_NUM_FILES +} = require('../../dd-trace/src/ci-visibility/telemetry') class CucumberPlugin extends CiPlugin { static get id () { @@ -54,7 +65,9 @@ class CucumberPlugin extends CiPlugin { this.testSessionSpan.setTag(TEST_STATUS, status) this.testModuleSpan.setTag(TEST_STATUS, status) this.testModuleSpan.finish() + this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'module') this.testSessionSpan.finish() + this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'session') finishAllTraceSpans(this.testSessionSpan) this.itrConfig = null @@ -69,9 +82,11 @@ class CucumberPlugin extends CiPlugin { 'cucumber' ) if (isUnskippable) { + this.telemetry.count(TELEMETRY_ITR_UNSKIPPABLE, { testLevel: 'suite' }) testSuiteMetadata[TEST_ITR_UNSKIPPABLE] = 'true' } if (isForcedToRun) { + this.telemetry.count(TELEMETRY_ITR_FORCED_TO_RUN, { testLevel: 'suite' }) testSuiteMetadata[TEST_ITR_FORCED_RUN] = 'true' } this.testSuiteSpan = this.tracer.startSpan('cucumber.test_suite', { @@ -82,20 +97,31 @@ class CucumberPlugin extends CiPlugin { ...testSuiteMetadata } }) + this.telemetry.ciVisEvent(TELEMETRY_EVENT_CREATED, 'suite') + if (this.itrConfig?.isCodeCoverageEnabled) { + this.telemetry.ciVisEvent(TELEMETRY_CODE_COVERAGE_STARTED, 'suite', { library: 'istanbul' }) + } }) this.addSub('ci:cucumber:test-suite:finish', status => { this.testSuiteSpan.setTag(TEST_STATUS, status) this.testSuiteSpan.finish() + this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'suite') }) this.addSub('ci:cucumber:test-suite:code-coverage', ({ coverageFiles, suiteFile }) => { - if (!this.itrConfig || !this.itrConfig.isCodeCoverageEnabled) { + if (!this.itrConfig?.isCodeCoverageEnabled) { return } + if (!coverageFiles.length) { + this.telemetry.count(TELEMETRY_CODE_COVERAGE_EMPTY) + } + const relativeCoverageFiles = [...coverageFiles, suiteFile] .map(filename => getTestSuitePath(filename, this.sourceRoot)) + this.telemetry.distribution(TELEMETRY_CODE_COVERAGE_NUM_FILES, {}, relativeCoverageFiles.length) + const formattedCoverage = { 
sessionId: this.testSuiteSpan.context()._traceId, suiteId: this.testSuiteSpan.context()._spanId, @@ -103,6 +129,7 @@ class CucumberPlugin extends CiPlugin { } this.tracer._exporter.exportCoverage(formattedCoverage) + this.telemetry.ciVisEvent(TELEMETRY_CODE_COVERAGE_FINISHED, 'suite', { library: 'istanbul' }) }) this.addSub('ci:cucumber:test:start', ({ testName, fullTestSuite, testSourceLine }) => { @@ -142,6 +169,11 @@ class CucumberPlugin extends CiPlugin { } span.finish() + this.telemetry.ciVisEvent( + TELEMETRY_EVENT_FINISHED, + 'test', + { hasCodeOwners: !!span.context()._tags[TEST_CODE_OWNERS] } + ) if (!isStep) { finishAllTraceSpans(span) } diff --git a/packages/datadog-plugin-cypress/src/plugin.js b/packages/datadog-plugin-cypress/src/plugin.js index 6b767501362..abd6abec33b 100644 --- a/packages/datadog-plugin-cypress/src/plugin.js +++ b/packages/datadog-plugin-cypress/src/plugin.js @@ -29,6 +29,29 @@ const { ORIGIN_KEY, COMPONENT } = require('../../dd-trace/src/constants') const log = require('../../dd-trace/src/log') const NoopTracer = require('../../dd-trace/src/noop/tracer') const { isMarkedAsUnskippable } = require('../../datadog-plugin-jest/src/util') +const { + TELEMETRY_EVENT_CREATED, + TELEMETRY_EVENT_FINISHED, + TELEMETRY_ITR_FORCED_TO_RUN, + TELEMETRY_CODE_COVERAGE_EMPTY, + TELEMETRY_ITR_UNSKIPPABLE, + TELEMETRY_CODE_COVERAGE_NUM_FILES, + incrementCountMetric, + distributionMetric +} = require('../../dd-trace/src/ci-visibility/telemetry') +const { + GIT_REPOSITORY_URL, + GIT_COMMIT_SHA, + GIT_BRANCH, + CI_PROVIDER_NAME +} = require('../../dd-trace/src/plugins/util/tags') +const { + OS_VERSION, + OS_PLATFORM, + OS_ARCHITECTURE, + RUNTIME_NAME, + RUNTIME_VERSION +} = require('../../dd-trace/src/plugins/util/env') const TEST_FRAMEWORK_NAME = 'cypress' @@ -152,16 +175,19 @@ module.exports = (on, config) => { const testEnvironmentMetadata = getTestEnvironmentMetadata(TEST_FRAMEWORK_NAME) const { - 'git.repository_url': repositoryUrl, - 'git.commit.sha': sha, - 'os.version': osVersion, - 'os.platform': osPlatform, - 'os.architecture': osArchitecture, - 'runtime.name': runtimeName, - 'runtime.version': runtimeVersion, - 'git.branch': branch + [GIT_REPOSITORY_URL]: repositoryUrl, + [GIT_COMMIT_SHA]: sha, + [OS_VERSION]: osVersion, + [OS_PLATFORM]: osPlatform, + [OS_ARCHITECTURE]: osArchitecture, + [RUNTIME_NAME]: runtimeName, + [RUNTIME_VERSION]: runtimeVersion, + [GIT_BRANCH]: branch, + [CI_PROVIDER_NAME]: ciProviderName } = testEnvironmentMetadata + const isUnsupportedCIProvider = !ciProviderName + const finishedTestsByFile = {} const testConfiguration = { @@ -192,6 +218,15 @@ module.exports = (on, config) => { let hasForcedToRunSuites = false let hasUnskippableSuites = false + function ciVisEvent (name, testLevel, tags = {}) { + incrementCountMetric(name, { + testLevel, + testFramework: 'cypress', + isUnsupportedCIProvider, + ...tags + }) + } + function getTestSpan (testName, testSuite, isUnskippable, isForcedToRun) { const testSuiteTags = { [TEST_COMMAND]: command, @@ -220,14 +255,18 @@ module.exports = (on, config) => { if (isUnskippable) { hasUnskippableSuites = true + incrementCountMetric(TELEMETRY_ITR_UNSKIPPABLE, { testLevel: 'suite' }) testSpanMetadata[TEST_ITR_UNSKIPPABLE] = 'true' } if (isForcedToRun) { hasForcedToRunSuites = true + incrementCountMetric(TELEMETRY_ITR_FORCED_TO_RUN, { testLevel: 'suite' }) testSpanMetadata[TEST_ITR_FORCED_RUN] = 'true' } + ciVisEvent(TELEMETRY_EVENT_CREATED, 'test', { hasCodeOwners: !!codeOwners }) + return 
tracer.startSpan(`${TEST_FRAMEWORK_NAME}.test`, { childOf, tags: { @@ -281,6 +320,8 @@ module.exports = (on, config) => { ...testSessionSpanMetadata } }) + ciVisEvent(TELEMETRY_EVENT_CREATED, 'session') + testModuleSpan = tracer.startSpan(`${TEST_FRAMEWORK_NAME}.test_module`, { childOf: testSessionSpan, tags: { @@ -289,6 +330,8 @@ module.exports = (on, config) => { ...testModuleSpanMetadata } }) + ciVisEvent(TELEMETRY_EVENT_CREATED, 'module') + return details }) }) @@ -347,6 +390,7 @@ module.exports = (on, config) => { } testSuiteSpan.finish() testSuiteSpan = null + ciVisEvent(TELEMETRY_EVENT_FINISHED, 'suite') } }) @@ -371,7 +415,9 @@ module.exports = (on, config) => { ) testModuleSpan.finish() + ciVisEvent(TELEMETRY_EVENT_FINISHED, 'module') testSessionSpan.finish() + ciVisEvent(TELEMETRY_EVENT_FINISHED, 'session') finishAllTraceSpans(testSessionSpan) } @@ -406,6 +452,7 @@ module.exports = (on, config) => { ...testSuiteSpanMetadata } }) + ciVisEvent(TELEMETRY_EVENT_CREATED, 'suite') return null }, 'dd:beforeEach': (test) => { @@ -435,6 +482,10 @@ module.exports = (on, config) => { if (coverage && isCodeCoverageEnabled && tracer._tracer._exporter && tracer._tracer._exporter.exportCoverage) { const coverageFiles = getCoveredFilenamesFromCoverage(coverage) const relativeCoverageFiles = coverageFiles.map(file => getTestSuitePath(file, rootDir)) + if (!relativeCoverageFiles.length) { + incrementCountMetric(TELEMETRY_CODE_COVERAGE_EMPTY) + } + distributionMetric(TELEMETRY_CODE_COVERAGE_NUM_FILES, {}, relativeCoverageFiles.length) const { _traceId, _spanId } = testSuiteSpan.context() const formattedCoverage = { sessionId: _traceId, @@ -470,6 +521,7 @@ module.exports = (on, config) => { // test spans are finished at after:spec } activeSpan = null + ciVisEvent(TELEMETRY_EVENT_FINISHED, 'test') return null }, 'dd:addTags': (tags) => { diff --git a/packages/datadog-plugin-jest/src/index.js b/packages/datadog-plugin-jest/src/index.js index 3eaceb034aa..6c4fed9755e 100644 --- a/packages/datadog-plugin-jest/src/index.js +++ b/packages/datadog-plugin-jest/src/index.js @@ -12,10 +12,21 @@ const { TEST_FRAMEWORK_VERSION, TEST_SOURCE_START, TEST_ITR_UNSKIPPABLE, - TEST_ITR_FORCED_RUN + TEST_ITR_FORCED_RUN, + TEST_CODE_OWNERS } = require('../../dd-trace/src/plugins/util/test') const { COMPONENT } = require('../../dd-trace/src/constants') const id = require('../../dd-trace/src/id') +const { + TELEMETRY_EVENT_CREATED, + TELEMETRY_EVENT_FINISHED, + TELEMETRY_CODE_COVERAGE_STARTED, + TELEMETRY_CODE_COVERAGE_FINISHED, + TELEMETRY_ITR_FORCED_TO_RUN, + TELEMETRY_CODE_COVERAGE_EMPTY, + TELEMETRY_ITR_UNSKIPPABLE, + TELEMETRY_CODE_COVERAGE_NUM_FILES +} = require('../../dd-trace/src/ci-visibility/telemetry') const isJestWorker = !!process.env.JEST_WORKER_ID @@ -81,7 +92,9 @@ class JestPlugin extends CiPlugin { ) this.testModuleSpan.finish() + this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'module') this.testSessionSpan.finish() + this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'session') finishAllTraceSpans(this.testSessionSpan) this.tracer._exporter.flush() }) @@ -103,7 +116,8 @@ class JestPlugin extends CiPlugin { _ddTestCommand: testCommand, _ddTestModuleId: testModuleId, _ddForcedToRun, - _ddUnskippable + _ddUnskippable, + _ddTestCodeCoverageEnabled } = testEnvironmentOptions const testSessionSpanContext = this.tracer.extract('text_map', { @@ -114,8 +128,10 @@ class JestPlugin extends CiPlugin { const testSuiteMetadata = getTestSuiteCommonTags(testCommand, frameworkVersion, testSuite, 'jest') if 
(_ddUnskippable) { + this.telemetry.count(TELEMETRY_ITR_UNSKIPPABLE, { testLevel: 'suite' }) testSuiteMetadata[TEST_ITR_UNSKIPPABLE] = 'true' if (_ddForcedToRun) { + this.telemetry.count(TELEMETRY_ITR_FORCED_TO_RUN, { testLevel: 'suite' }) testSuiteMetadata[TEST_ITR_FORCED_RUN] = 'true' } } @@ -128,6 +144,10 @@ class JestPlugin extends CiPlugin { ...testSuiteMetadata } }) + this.telemetry.ciVisEvent(TELEMETRY_EVENT_CREATED, 'suite') + if (_ddTestCodeCoverageEnabled) { + this.telemetry.ciVisEvent(TELEMETRY_CODE_COVERAGE_STARTED, 'suite', { library: 'istanbul' }) + } }) this.addSub('ci:jest:worker-report:trace', traces => { @@ -164,6 +184,7 @@ class JestPlugin extends CiPlugin { this.testSuiteSpan.setTag('error', new Error(errorMessage)) } this.testSuiteSpan.finish() + this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'suite') // Suites potentially run in a different process than the session, // so calling finishAllTraceSpans on the session span is not enough finishAllTraceSpans(this.testSuiteSpan) @@ -180,14 +201,22 @@ class JestPlugin extends CiPlugin { * because this subscription happens in a different process from the one * fetching the ITR config. */ - this.addSub('ci:jest:test-suite:code-coverage', (coverageFiles) => { + this.addSub('ci:jest:test-suite:code-coverage', ({ coverageFiles, testSuite }) => { + if (!coverageFiles.length) { + this.telemetry.count(TELEMETRY_CODE_COVERAGE_EMPTY) + } + const files = [...coverageFiles, testSuite] + const { _traceId, _spanId } = this.testSuiteSpan.context() const formattedCoverage = { sessionId: _traceId, suiteId: _spanId, - files: coverageFiles + files } + this.tracer._exporter.exportCoverage(formattedCoverage) + this.telemetry.ciVisEvent(TELEMETRY_CODE_COVERAGE_FINISHED, 'suite', { library: 'istanbul' }) + this.telemetry.distribution(TELEMETRY_CODE_COVERAGE_NUM_FILES, {}, files.length) }) this.addSub('ci:jest:test:start', (test) => { @@ -204,6 +233,11 @@ class JestPlugin extends CiPlugin { span.setTag(TEST_SOURCE_START, testStartLine) } span.finish() + this.telemetry.ciVisEvent( + TELEMETRY_EVENT_FINISHED, + 'test', + { hasCodeOwners: !!span.context()._tags[TEST_CODE_OWNERS] } + ) finishAllTraceSpans(span) }) diff --git a/packages/datadog-plugin-mocha/src/index.js b/packages/datadog-plugin-mocha/src/index.js index c8af76247b1..bef92ffe18e 100644 --- a/packages/datadog-plugin-mocha/src/index.js +++ b/packages/datadog-plugin-mocha/src/index.js @@ -13,9 +13,20 @@ const { addIntelligentTestRunnerSpanTags, TEST_SOURCE_START, TEST_ITR_UNSKIPPABLE, - TEST_ITR_FORCED_RUN + TEST_ITR_FORCED_RUN, + TEST_CODE_OWNERS } = require('../../dd-trace/src/plugins/util/test') const { COMPONENT } = require('../../dd-trace/src/constants') +const { + TELEMETRY_EVENT_CREATED, + TELEMETRY_EVENT_FINISHED, + TELEMETRY_CODE_COVERAGE_STARTED, + TELEMETRY_CODE_COVERAGE_FINISHED, + TELEMETRY_ITR_FORCED_TO_RUN, + TELEMETRY_CODE_COVERAGE_EMPTY, + TELEMETRY_ITR_UNSKIPPABLE, + TELEMETRY_CODE_COVERAGE_NUM_FILES +} = require('../../dd-trace/src/ci-visibility/telemetry') class MochaPlugin extends CiPlugin { static get id () { @@ -35,6 +46,10 @@ class MochaPlugin extends CiPlugin { } const testSuiteSpan = this._testSuites.get(suiteFile) + if (!coverageFiles.length) { + this.telemetry.count(TELEMETRY_CODE_COVERAGE_EMPTY) + } + const relativeCoverageFiles = [...coverageFiles, suiteFile] .map(filename => getTestSuitePath(filename, this.sourceRoot)) @@ -47,6 +62,8 @@ class MochaPlugin extends CiPlugin { } this.tracer._exporter.exportCoverage(formattedCoverage) + 
this.telemetry.ciVisEvent(TELEMETRY_CODE_COVERAGE_FINISHED, 'suite', { library: 'istanbul' }) + this.telemetry.distribution(TELEMETRY_CODE_COVERAGE_NUM_FILES, {}, relativeCoverageFiles.length) }) this.addSub('ci:mocha:test-suite:start', ({ testSuite, isUnskippable, isForcedToRun }) => { @@ -59,9 +76,11 @@ class MochaPlugin extends CiPlugin { ) if (isUnskippable) { testSuiteMetadata[TEST_ITR_UNSKIPPABLE] = 'true' + this.telemetry.count(TELEMETRY_ITR_UNSKIPPABLE, { testLevel: 'suite' }) } if (isForcedToRun) { testSuiteMetadata[TEST_ITR_FORCED_RUN] = 'true' + this.telemetry.count(TELEMETRY_ITR_FORCED_TO_RUN, { testLevel: 'suite' }) } const testSuiteSpan = this.tracer.startSpan('mocha.test_suite', { @@ -72,6 +91,10 @@ class MochaPlugin extends CiPlugin { ...testSuiteMetadata } }) + this.telemetry.ciVisEvent(TELEMETRY_EVENT_CREATED, 'suite') + if (this.itrConfig?.isCodeCoverageEnabled) { + this.telemetry.ciVisEvent(TELEMETRY_CODE_COVERAGE_STARTED, 'suite', { library: 'istanbul' }) + } this.enter(testSuiteSpan, store) this._testSuites.set(testSuite, testSuiteSpan) }) @@ -85,6 +108,7 @@ class MochaPlugin extends CiPlugin { span.setTag(TEST_STATUS, status) } span.finish() + this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'suite') } }) @@ -113,6 +137,11 @@ class MochaPlugin extends CiPlugin { span.setTag(TEST_STATUS, status) span.finish() + this.telemetry.ciVisEvent( + TELEMETRY_EVENT_FINISHED, + 'test', + { hasCodeOwners: !!span.context()._tags[TEST_CODE_OWNERS] } + ) finishAllTraceSpans(span) } }) @@ -179,7 +208,9 @@ class MochaPlugin extends CiPlugin { ) this.testModuleSpan.finish() + this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'module') this.testSessionSpan.finish() + this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'session') finishAllTraceSpans(this.testSessionSpan) } this.itrConfig = null diff --git a/packages/datadog-plugin-playwright/src/index.js b/packages/datadog-plugin-playwright/src/index.js index 928477ffc3b..eb8810967c3 100644 --- a/packages/datadog-plugin-playwright/src/index.js +++ b/packages/datadog-plugin-playwright/src/index.js @@ -8,10 +8,15 @@ const { finishAllTraceSpans, getTestSuitePath, getTestSuiteCommonTags, - TEST_SOURCE_START + TEST_SOURCE_START, + TEST_CODE_OWNERS } = require('../../dd-trace/src/plugins/util/test') const { RESOURCE_NAME } = require('../../../ext/tags') const { COMPONENT } = require('../../dd-trace/src/constants') +const { + TELEMETRY_EVENT_CREATED, + TELEMETRY_EVENT_FINISHED +} = require('../../dd-trace/src/ci-visibility/telemetry') class PlaywrightPlugin extends CiPlugin { static get id () { @@ -28,7 +33,9 @@ class PlaywrightPlugin extends CiPlugin { this.testSessionSpan.setTag(TEST_STATUS, status) this.testModuleSpan.finish() + this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'module') this.testSessionSpan.finish() + this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'session') finishAllTraceSpans(this.testSessionSpan) this.tracer._exporter.flush(onDone) }) @@ -52,6 +59,7 @@ class PlaywrightPlugin extends CiPlugin { ...testSuiteMetadata } }) + this.telemetry.ciVisEvent(TELEMETRY_EVENT_CREATED, 'suite') this.enter(testSuiteSpan, store) this._testSuites.set(testSuite, testSuiteSpan) @@ -63,6 +71,7 @@ class PlaywrightPlugin extends CiPlugin { if (!span) return span.setTag(TEST_STATUS, status) span.finish() + this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'suite') }) this.addSub('ci:playwright:test:start', ({ testName, testSuiteAbsolutePath, testSourceLine }) => { @@ -104,6 +113,13 @@ class PlaywrightPlugin extends CiPlugin { }) 
span.finish() + + this.telemetry.ciVisEvent( + TELEMETRY_EVENT_FINISHED, + 'test', + { hasCodeOwners: !!span.context()._tags[TEST_CODE_OWNERS] } + ) + finishAllTraceSpans(span) }) } diff --git a/packages/dd-trace/src/ci-visibility/exporters/agentless/coverage-writer.js b/packages/dd-trace/src/ci-visibility/exporters/agentless/coverage-writer.js index 8728e4a2e04..52001672101 100644 --- a/packages/dd-trace/src/ci-visibility/exporters/agentless/coverage-writer.js +++ b/packages/dd-trace/src/ci-visibility/exporters/agentless/coverage-writer.js @@ -5,6 +5,16 @@ const { safeJSONStringify } = require('../../../exporters/common/util') const { CoverageCIVisibilityEncoder } = require('../../../encode/coverage-ci-visibility') const BaseWriter = require('../../../exporters/common/writer') +const { + incrementCountMetric, + distributionMetric, + TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS, + TELEMETRY_ENDPOINT_PAYLOAD_BYTES, + TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS_MS, + TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS_ERRORS, + TELEMETRY_ENDPOINT_PAYLOAD_DROPPED, + getErrorTypeFromStatusCode +} = require('../../../ci-visibility/telemetry') class Writer extends BaseWriter { constructor ({ url, evpProxyPrefix = '' }) { @@ -34,8 +44,27 @@ class Writer extends BaseWriter { log.debug(() => `Request to the intake: ${safeJSONStringify(options)}`) - request(form, options, (err, res) => { + const startRequestTime = Date.now() + + incrementCountMetric(TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS, { endpoint: 'code_coverage' }) + distributionMetric(TELEMETRY_ENDPOINT_PAYLOAD_BYTES, { endpoint: 'code_coverage' }, form.size()) + + request(form, options, (err, res, statusCode) => { + distributionMetric( + TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS_MS, + { endpoint: 'code_coverage' }, + Date.now() - startRequestTime + ) if (err) { + const errorType = getErrorTypeFromStatusCode(statusCode) + incrementCountMetric( + TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS_ERRORS, + { endpoint: 'code_coverage', errorType } + ) + incrementCountMetric( + TELEMETRY_ENDPOINT_PAYLOAD_DROPPED, + { endpoint: 'code_coverage' } + ) log.error(err) done() return diff --git a/packages/dd-trace/src/ci-visibility/exporters/agentless/writer.js b/packages/dd-trace/src/ci-visibility/exporters/agentless/writer.js index d04406f33b9..afbc670443e 100644 --- a/packages/dd-trace/src/ci-visibility/exporters/agentless/writer.js +++ b/packages/dd-trace/src/ci-visibility/exporters/agentless/writer.js @@ -5,6 +5,16 @@ const log = require('../../../log') const { AgentlessCiVisibilityEncoder } = require('../../../encode/agentless-ci-visibility') const BaseWriter = require('../../../exporters/common/writer') +const { + incrementCountMetric, + distributionMetric, + TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS, + TELEMETRY_ENDPOINT_PAYLOAD_BYTES, + TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS_MS, + TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS_ERRORS, + TELEMETRY_ENDPOINT_PAYLOAD_DROPPED, + getErrorTypeFromStatusCode +} = require('../../../ci-visibility/telemetry') class Writer extends BaseWriter { constructor ({ url, tags, evpProxyPrefix = '' }) { @@ -35,8 +45,27 @@ class Writer extends BaseWriter { log.debug(() => `Request to the intake: ${safeJSONStringify(options)}`) - request(data, options, (err, res) => { + const startRequestTime = Date.now() + + incrementCountMetric(TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS, { endpoint: 'test_cycle' }) + distributionMetric(TELEMETRY_ENDPOINT_PAYLOAD_BYTES, { endpoint: 'test_cycle' }, data.length) + + request(data, options, (err, res, statusCode) => { + distributionMetric( + 
TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS_MS, + { endpoint: 'test_cycle' }, + Date.now() - startRequestTime + ) if (err) { + const errorType = getErrorTypeFromStatusCode(statusCode) + incrementCountMetric( + TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS_ERRORS, + { endpoint: 'test_cycle', errorType } + ) + incrementCountMetric( + TELEMETRY_ENDPOINT_PAYLOAD_DROPPED, + { endpoint: 'test_cycle' } + ) log.error(err) done() return diff --git a/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js b/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js index 2747b9746e7..fb0329ab637 100644 --- a/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js +++ b/packages/dd-trace/src/ci-visibility/exporters/git/git_metadata.js @@ -15,6 +15,20 @@ const { unshallowRepository } = require('../../../plugins/util/git') +const { + incrementCountMetric, + distributionMetric, + TELEMETRY_GIT_REQUESTS_SEARCH_COMMITS, + TELEMETRY_GIT_REQUESTS_SEARCH_COMMITS_MS, + TELEMETRY_GIT_REQUESTS_SEARCH_COMMITS_ERRORS, + TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_NUM, + TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES, + TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_MS, + TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_ERRORS, + TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_BYTES, + getErrorTypeFromStatusCode +} = require('../../../ci-visibility/telemetry') + const isValidSha1 = (sha) => /^[0-9a-f]{40}$/.test(sha) const isValidSha256 = (sha) => /^[0-9a-f]{64}$/.test(sha) @@ -74,8 +88,13 @@ function getCommitsToUpload ({ url, repositoryUrl, latestCommits, isEvpProxy }, })) }) - request(localCommitData, options, (err, response) => { + incrementCountMetric(TELEMETRY_GIT_REQUESTS_SEARCH_COMMITS) + const startTime = Date.now() + request(localCommitData, options, (err, response, statusCode) => { + distributionMetric(TELEMETRY_GIT_REQUESTS_SEARCH_COMMITS_MS, {}, Date.now() - startTime) if (err) { + const errorType = getErrorTypeFromStatusCode(statusCode) + incrementCountMetric(TELEMETRY_GIT_REQUESTS_SEARCH_COMMITS_ERRORS, { errorType }) const error = new Error(`Error fetching commits to exclude: ${err.message}`) return callback(error) } @@ -83,6 +102,7 @@ function getCommitsToUpload ({ url, repositoryUrl, latestCommits, isEvpProxy }, try { alreadySeenCommits = validateCommits(JSON.parse(response).data) } catch (e) { + incrementCountMetric(TELEMETRY_GIT_REQUESTS_SEARCH_COMMITS_ERRORS, { errorType: 'network' }) return callback(new Error(`Can't parse commits to exclude response: ${e.message}`)) } log.debug(`There are ${alreadySeenCommits.length} commits to exclude.`) @@ -147,12 +167,20 @@ function uploadPackFile ({ url, isEvpProxy, packFileToUpload, repositoryUrl, hea delete options.headers['dd-api-key'] } + incrementCountMetric(TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES) + + const uploadSize = form.size() + + const startTime = Date.now() request(form, options, (err, _, statusCode) => { + distributionMetric(TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_MS, {}, Date.now() - startTime) if (err) { + const errorType = getErrorTypeFromStatusCode(statusCode) + incrementCountMetric(TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_ERRORS, { errorType }) const error = new Error(`Could not upload packfiles: status code ${statusCode}: ${err.message}`) - return callback(error) + return callback(error, uploadSize) } - callback(null) + callback(null, uploadSize) }) } @@ -173,10 +201,14 @@ function generateAndUploadPackFiles ({ return callback(new Error('Failed to generate packfiles')) } + distributionMetric(TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_NUM, {}, 
packFilesToUpload.length) let packFileIndex = 0 + let totalUploadedBytes = 0 // This uploads packfiles sequentially - const uploadPackFileCallback = (err) => { + const uploadPackFileCallback = (err, byteLength) => { + totalUploadedBytes += byteLength if (err || packFileIndex === packFilesToUpload.length) { + distributionMetric(TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_BYTES, {}, totalUploadedBytes) return callback(err) } return uploadPackFile( diff --git a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js index 2aee819004d..6df4d99ea98 100644 --- a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js +++ b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-itr-configuration.js @@ -1,6 +1,15 @@ const request = require('../../exporters/common/request') const id = require('../../id') const log = require('../../log') +const { + incrementCountMetric, + distributionMetric, + TELEMETRY_GIT_REQUESTS_SETTINGS, + TELEMETRY_GIT_REQUESTS_SETTINGS_MS, + TELEMETRY_GIT_REQUESTS_SETTINGS_ERRORS, + TELEMETRY_GIT_REQUESTS_SETTINGS_RESPONSE, + getErrorTypeFromStatusCode +} = require('../../ci-visibility/telemetry') function getItrConfiguration ({ url, @@ -62,8 +71,14 @@ function getItrConfiguration ({ } }) - request(data, options, (err, res) => { + incrementCountMetric(TELEMETRY_GIT_REQUESTS_SETTINGS) + + const startTime = Date.now() + request(data, options, (err, res, statusCode) => { + distributionMetric(TELEMETRY_GIT_REQUESTS_SETTINGS_MS, {}, Date.now() - startTime) if (err) { + const errorType = getErrorTypeFromStatusCode(statusCode) + incrementCountMetric(TELEMETRY_GIT_REQUESTS_SETTINGS_ERRORS, { errorType }) done(err) } else { try { @@ -91,6 +106,8 @@ function getItrConfiguration ({ log.debug(() => 'Dangerously set test skipping to true') } + incrementCountMetric(TELEMETRY_GIT_REQUESTS_SETTINGS_RESPONSE, settings) + done(null, settings) } catch (err) { done(err) diff --git a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js index 04448e9a651..7ee0091a7cb 100644 --- a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js +++ b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js @@ -1,5 +1,16 @@ const request = require('../../exporters/common/request') const log = require('../../log') +const { + incrementCountMetric, + distributionMetric, + TELEMETRY_ITR_SKIPPABLE_TESTS, + TELEMETRY_ITR_SKIPPABLE_TESTS_MS, + TELEMETRY_ITR_SKIPPABLE_TESTS_ERRORS, + TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_SUITES, + TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_TESTS, + TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_BYTES, + getErrorTypeFromStatusCode +} = require('../../ci-visibility/telemetry') function getSkippableSuites ({ url, @@ -59,8 +70,15 @@ function getSkippableSuites ({ } }) - request(data, options, (err, res) => { + incrementCountMetric(TELEMETRY_ITR_SKIPPABLE_TESTS) + + const startTime = Date.now() + + request(data, options, (err, res, statusCode) => { + distributionMetric(TELEMETRY_ITR_SKIPPABLE_TESTS_MS, {}, Date.now() - startTime) if (err) { + const errorType = getErrorTypeFromStatusCode(statusCode) + incrementCountMetric(TELEMETRY_ITR_SKIPPABLE_TESTS_ERRORS, { errorType }) done(err) } else { let skippableSuites = [] @@ -74,6 +92,13 @@ function getSkippableSuites ({ } return { suite, 
name } }) + incrementCountMetric( + testLevel === 'test' + ? TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_TESTS : TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_SUITES, + {}, + skippableSuites.length + ) + distributionMetric(TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_BYTES, {}, res.length) log.debug(() => `Number of received skippable ${testLevel}s: ${skippableSuites.length}`) done(null, skippableSuites) } catch (err) { diff --git a/packages/dd-trace/src/ci-visibility/telemetry.js b/packages/dd-trace/src/ci-visibility/telemetry.js new file mode 100644 index 00000000000..1bc01c502c9 --- /dev/null +++ b/packages/dd-trace/src/ci-visibility/telemetry.js @@ -0,0 +1,130 @@ +const telemetryMetrics = require('../telemetry/metrics') + +const ciVisibilityMetrics = telemetryMetrics.manager.namespace('civisibility') + +const formattedTags = { + testLevel: 'event_type', + testFramework: 'test_framework', + errorType: 'error_type', + exitCode: 'exit_code', + isCodeCoverageEnabled: 'coverage_enabled', + isSuitesSkippingEnabled: 'itrskip_enabled', + hasCodeOwners: 'has_code_owners', + isUnsupportedCIProvider: 'is_unsupported_ci' +} + +// Transform tags dictionary to array of strings. +// If tag value is true, then only tag key is added to the array. +function formatMetricTags (tagsDictionary) { + return Object.keys(tagsDictionary).reduce((acc, tagKey) => { + const formattedTagKey = formattedTags[tagKey] || tagKey + if (tagsDictionary[tagKey] === true) { + acc.push(formattedTagKey) + } else if (tagsDictionary[tagKey] !== undefined && tagsDictionary[tagKey] !== null) { + acc.push(`${formattedTagKey}:${tagsDictionary[tagKey]}`) + } + return acc + }, []) +} + +function incrementCountMetric (name, tags = {}, value = 1) { + ciVisibilityMetrics.count(name, formatMetricTags(tags)).inc(value) +} + +function distributionMetric (name, tags, measure) { + ciVisibilityMetrics.distribution(name, formatMetricTags(tags)).track(measure) +} + +// CI Visibility telemetry events +const TELEMETRY_EVENT_CREATED = 'event_created' +const TELEMETRY_EVENT_FINISHED = 'event_finished' +const TELEMETRY_CODE_COVERAGE_STARTED = 'code_coverage_started' +const TELEMETRY_CODE_COVERAGE_FINISHED = 'code_coverage_finished' +const TELEMETRY_ITR_SKIPPED = 'itr_skipped' +const TELEMETRY_ITR_UNSKIPPABLE = 'itr_unskippable' +const TELEMETRY_ITR_FORCED_TO_RUN = 'itr_forced_run' +const TELEMETRY_CODE_COVERAGE_EMPTY = 'code_coverage.is_empty' +const TELEMETRY_CODE_COVERAGE_NUM_FILES = 'code_coverage.files' +const TELEMETRY_EVENTS_ENQUEUED_FOR_SERIALIZATION = 'events_enqueued_for_serialization' +const TELEMETRY_ENDPOINT_PAYLOAD_SERIALIZATION_MS = 'endpoint_payload.events_serialization_ms' +const TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS = 'endpoint_payload.requests' +const TELEMETRY_ENDPOINT_PAYLOAD_BYTES = 'endpoint_payload.bytes' +const TELEMETRY_ENDPOINT_PAYLOAD_EVENTS_COUNT = 'endpoint_payload.events_count' +const TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS_MS = 'endpoint_payload.requests_ms' +const TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS_ERRORS = 'endpoint_payload.requests_errors' +const TELEMETRY_ENDPOINT_PAYLOAD_DROPPED = 'endpoint_payload.dropped' +const TELEMETRY_GIT_COMMAND = 'git.command' +const TELEMETRY_GIT_COMMAND_MS = 'git.command_ms' +const TELEMETRY_GIT_COMMAND_ERRORS = 'git.command_errors' +const TELEMETRY_GIT_REQUESTS_SEARCH_COMMITS = 'git_requests.search_commits' +const TELEMETRY_GIT_REQUESTS_SEARCH_COMMITS_MS = 'git_requests.search_commits_ms' +const TELEMETRY_GIT_REQUESTS_SEARCH_COMMITS_ERRORS = 'git_requests.search_commits_errors' +const 
TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES = 'git_requests.objects_pack' +const TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_MS = 'git_requests.objects_pack_ms' +const TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_ERRORS = 'git_requests.objects_pack_errors' +const TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_NUM = 'git_requests.objects_pack_files' +const TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_BYTES = 'git_requests.objects_pack_bytes' +const TELEMETRY_GIT_REQUESTS_SETTINGS = 'git_requests.settings' +const TELEMETRY_GIT_REQUESTS_SETTINGS_MS = 'git_requests.settings_ms' +const TELEMETRY_GIT_REQUESTS_SETTINGS_ERRORS = 'git_requests.settings_errors' +const TELEMETRY_GIT_REQUESTS_SETTINGS_RESPONSE = 'git_requests.settings_response' +const TELEMETRY_ITR_SKIPPABLE_TESTS = 'itr_skippable_tests.request' +const TELEMETRY_ITR_SKIPPABLE_TESTS_MS = 'itr_skippable_tests.request_ms' +const TELEMETRY_ITR_SKIPPABLE_TESTS_ERRORS = 'itr_skippable_tests.request_errors' +const TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_SUITES = 'itr_skippable_tests.response_suites' +const TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_TESTS = 'itr_skippable_tests.response_tests' +const TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_BYTES = 'itr_skippable_tests.response_bytes' + +function getErrorTypeFromStatusCode (statusCode) { + if (statusCode >= 400 && statusCode < 500) { + return 'status_code_4xx_response' + } + if (statusCode >= 500) { + return 'status_code_5xx_response' + } + return 'network' +} + +module.exports = { + incrementCountMetric, + distributionMetric, + TELEMETRY_EVENT_CREATED, + TELEMETRY_EVENT_FINISHED, + TELEMETRY_CODE_COVERAGE_STARTED, + TELEMETRY_CODE_COVERAGE_FINISHED, + TELEMETRY_ITR_SKIPPED, + TELEMETRY_ITR_UNSKIPPABLE, + TELEMETRY_ITR_FORCED_TO_RUN, + TELEMETRY_CODE_COVERAGE_EMPTY, + TELEMETRY_CODE_COVERAGE_NUM_FILES, + TELEMETRY_EVENTS_ENQUEUED_FOR_SERIALIZATION, + TELEMETRY_ENDPOINT_PAYLOAD_SERIALIZATION_MS, + TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS, + TELEMETRY_ENDPOINT_PAYLOAD_BYTES, + TELEMETRY_ENDPOINT_PAYLOAD_EVENTS_COUNT, + TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS_MS, + TELEMETRY_ENDPOINT_PAYLOAD_REQUESTS_ERRORS, + TELEMETRY_ENDPOINT_PAYLOAD_DROPPED, + TELEMETRY_GIT_COMMAND, + TELEMETRY_GIT_COMMAND_MS, + TELEMETRY_GIT_COMMAND_ERRORS, + TELEMETRY_GIT_REQUESTS_SEARCH_COMMITS, + TELEMETRY_GIT_REQUESTS_SEARCH_COMMITS_MS, + TELEMETRY_GIT_REQUESTS_SEARCH_COMMITS_ERRORS, + TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_NUM, + TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_BYTES, + TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES, + TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_MS, + TELEMETRY_GIT_REQUESTS_OBJECT_PACKFILES_ERRORS, + TELEMETRY_GIT_REQUESTS_SETTINGS, + TELEMETRY_GIT_REQUESTS_SETTINGS_MS, + TELEMETRY_GIT_REQUESTS_SETTINGS_ERRORS, + TELEMETRY_GIT_REQUESTS_SETTINGS_RESPONSE, + TELEMETRY_ITR_SKIPPABLE_TESTS, + TELEMETRY_ITR_SKIPPABLE_TESTS_MS, + TELEMETRY_ITR_SKIPPABLE_TESTS_ERRORS, + TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_SUITES, + TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_TESTS, + TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_BYTES, + getErrorTypeFromStatusCode +} diff --git a/packages/dd-trace/src/encode/agentless-ci-visibility.js b/packages/dd-trace/src/encode/agentless-ci-visibility.js index c0111d19679..cf9c180ce3a 100644 --- a/packages/dd-trace/src/encode/agentless-ci-visibility.js +++ b/packages/dd-trace/src/encode/agentless-ci-visibility.js @@ -3,8 +3,13 @@ const { truncateSpan, normalizeSpan } = require('./tags-processors') const { AgentEncoder } = require('./0.4') const { version: ddTraceVersion } = require('../../../../package.json') const id = 
require('../../../dd-trace/src/id') -const ENCODING_VERSION = 1 +const { + distributionMetric, + TELEMETRY_ENDPOINT_PAYLOAD_SERIALIZATION_MS, + TELEMETRY_ENDPOINT_PAYLOAD_EVENTS_COUNT +} = require('../ci-visibility/telemetry') +const ENCODING_VERSION = 1 const ALLOWED_CONTENT_TYPES = ['test_session_end', 'test_module_end', 'test_suite_end', 'test'] const TEST_SUITE_KEYS_LENGTH = 12 @@ -247,6 +252,8 @@ class AgentlessCiVisibilityEncoder extends AgentEncoder { } _encode (bytes, trace) { + const startTime = Date.now() + const rawEvents = trace.map(formatSpan) const testSessionEvents = rawEvents.filter( @@ -261,9 +268,15 @@ class AgentlessCiVisibilityEncoder extends AgentEncoder { for (const event of events) { this._encodeEvent(bytes, event) } + distributionMetric( + TELEMETRY_ENDPOINT_PAYLOAD_SERIALIZATION_MS, + { endpoint: 'test_cycle' }, + Date.now() - startTime + ) } makePayload () { + distributionMetric(TELEMETRY_ENDPOINT_PAYLOAD_EVENTS_COUNT, { endpoint: 'test_cycle' }, this._eventCount) const bytes = this._traceBytes const eventsOffset = this._eventsOffset const eventsCount = this._eventCount diff --git a/packages/dd-trace/src/encode/coverage-ci-visibility.js b/packages/dd-trace/src/encode/coverage-ci-visibility.js index a877e11e864..75da679340c 100644 --- a/packages/dd-trace/src/encode/coverage-ci-visibility.js +++ b/packages/dd-trace/src/encode/coverage-ci-visibility.js @@ -2,6 +2,11 @@ const { AgentEncoder } = require('./0.4') const Chunk = require('./chunk') +const { + distributionMetric, + TELEMETRY_ENDPOINT_PAYLOAD_SERIALIZATION_MS, + TELEMETRY_ENDPOINT_PAYLOAD_EVENTS_COUNT +} = require('../ci-visibility/telemetry') const FormData = require('../exporters/common/form-data') const COVERAGE_PAYLOAD_VERSION = 2 @@ -21,8 +26,16 @@ class CoverageCIVisibilityEncoder extends AgentEncoder { } encode (coverage) { + const startTime = Date.now() + this._coveragesCount++ this.encodeCodeCoverage(this._coverageBytes, coverage) + + distributionMetric( + TELEMETRY_ENDPOINT_PAYLOAD_SERIALIZATION_MS, + { endpoint: 'code_coverage' }, + Date.now() - startTime + ) } encodeCodeCoverage (bytes, coverage) { @@ -73,6 +86,7 @@ class CoverageCIVisibilityEncoder extends AgentEncoder { } makePayload () { + distributionMetric(TELEMETRY_ENDPOINT_PAYLOAD_EVENTS_COUNT, { endpoint: 'code_coverage' }, this._coveragesCount) const bytes = this._coverageBytes const coveragesOffset = this._coveragesOffset diff --git a/packages/dd-trace/src/exporters/common/agent-info-exporter.js b/packages/dd-trace/src/exporters/common/agent-info-exporter.js index 9d1c45195bc..923b7eef0ef 100644 --- a/packages/dd-trace/src/exporters/common/agent-info-exporter.js +++ b/packages/dd-trace/src/exporters/common/agent-info-exporter.js @@ -1,6 +1,7 @@ const { URL, format } = require('url') const request = require('./request') +const { incrementCountMetric, TELEMETRY_EVENTS_ENQUEUED_FOR_SERIALIZATION } = require('../../ci-visibility/telemetry') function fetchAgentInfo (url, callback) { request('', { @@ -49,6 +50,9 @@ class AgentInfoExporter { } _export (payload, writer = this._writer, timerKey = '_timer') { + if (this._config.isCiVisibility) { + incrementCountMetric(TELEMETRY_EVENTS_ENQUEUED_FOR_SERIALIZATION, {}, payload.length) + } writer.append(payload) const { flushInterval } = this._config diff --git a/packages/dd-trace/src/exporters/common/form-data.js b/packages/dd-trace/src/exporters/common/form-data.js index b20e97b8864..dacd495b160 100644 --- a/packages/dd-trace/src/exporters/common/form-data.js +++ 
b/packages/dd-trace/src/exporters/common/form-data.js @@ -21,6 +21,10 @@ class FormData extends Readable { } } + size () { + return this._data.reduce((size, chunk) => size + chunk.length, 0) + } + getHeaders () { return { 'Content-Type': 'multipart/form-data; boundary=' + this._boundary } } diff --git a/packages/dd-trace/src/plugins/ci_plugin.js b/packages/dd-trace/src/plugins/ci_plugin.js index 0112c4cb4fa..5d9ff3af5cf 100644 --- a/packages/dd-trace/src/plugins/ci_plugin.js +++ b/packages/dd-trace/src/plugins/ci_plugin.js @@ -20,6 +20,14 @@ const { const Plugin = require('./plugin') const { COMPONENT } = require('../constants') const log = require('../log') +const { + incrementCountMetric, + distributionMetric, + TELEMETRY_EVENT_CREATED, + TELEMETRY_ITR_SKIPPED +} = require('../ci-visibility/telemetry') +const { CI_PROVIDER_NAME, GIT_REPOSITORY_URL, GIT_COMMIT_SHA, GIT_BRANCH } = require('./util/tags') +const { OS_VERSION, OS_PLATFORM, OS_ARCHITECTURE, RUNTIME_NAME, RUNTIME_VERSION } = require('./util/env') module.exports = class CiPlugin extends Plugin { constructor (...args) { @@ -71,6 +79,7 @@ module.exports = class CiPlugin extends Plugin { ...testSessionSpanMetadata } }) + this.telemetry.ciVisEvent(TELEMETRY_EVENT_CREATED, 'session') this.testModuleSpan = this.tracer.startSpan(`${this.constructor.id}.test_module`, { childOf: this.testSessionSpan, tags: { @@ -79,6 +88,7 @@ module.exports = class CiPlugin extends Plugin { ...testModuleSpanMetadata } }) + this.telemetry.ciVisEvent(TELEMETRY_EVENT_CREATED, 'module') }) this.addSub(`ci:${this.constructor.id}:itr:skipped-suites`, ({ skippedSuites, frameworkVersion }) => { @@ -97,25 +107,49 @@ module.exports = class CiPlugin extends Plugin { } }).finish() }) + this.telemetry.count(TELEMETRY_ITR_SKIPPED, { testLevel: 'suite' }, skippedSuites.length) }) } + get telemetry () { + const testFramework = this.constructor.id + return { + ciVisEvent: function (name, testLevel, tags = {}) { + incrementCountMetric(name, { + testLevel, + testFramework, + isUnsupportedCIProvider: this.isUnsupportedCIProvider, + ...tags + }) + }, + count: function (name, tags, value = 1) { + incrementCountMetric(name, tags, value) + }, + distribution: function (name, tags, measure) { + distributionMetric(name, tags, measure) + } + } + } + configure (config) { super.configure(config) this.testEnvironmentMetadata = getTestEnvironmentMetadata(this.constructor.id, this.config) this.codeOwnersEntries = getCodeOwnersFileEntries() const { - 'git.repository_url': repositoryUrl, - 'git.commit.sha': sha, - 'os.version': osVersion, - 'os.platform': osPlatform, - 'os.architecture': osArchitecture, - 'runtime.name': runtimeName, - 'runtime.version': runtimeVersion, - 'git.branch': branch + [GIT_REPOSITORY_URL]: repositoryUrl, + [GIT_COMMIT_SHA]: sha, + [OS_VERSION]: osVersion, + [OS_PLATFORM]: osPlatform, + [OS_ARCHITECTURE]: osArchitecture, + [RUNTIME_NAME]: runtimeName, + [RUNTIME_VERSION]: runtimeVersion, + [GIT_BRANCH]: branch, + [CI_PROVIDER_NAME]: ciProviderName } = this.testEnvironmentMetadata + this.isUnsupportedCIProvider = !ciProviderName + this.testConfiguration = { repositoryUrl, sha, @@ -170,6 +204,8 @@ module.exports = class CiPlugin extends Plugin { } } + this.telemetry.ciVisEvent(TELEMETRY_EVENT_CREATED, 'test', { hasCodeOwners: !!codeOwners }) + const testSpan = this.tracer .startSpan(`${this.constructor.id}.test`, { childOf, diff --git a/packages/dd-trace/src/plugins/util/exec.js b/packages/dd-trace/src/plugins/util/exec.js index a2d091232c6..3e3ca3f3660 100644 --- 
a/packages/dd-trace/src/plugins/util/exec.js +++ b/packages/dd-trace/src/plugins/util/exec.js @@ -1,10 +1,31 @@ const cp = require('child_process') const log = require('../../log') +const { distributionMetric, incrementCountMetric } = require('../../ci-visibility/telemetry') -const sanitizedExec = (cmd, flags, options = { stdio: 'pipe' }) => { +const sanitizedExec = ( + cmd, + flags, + operationMetric, + durationMetric, + errorMetric +) => { + let startTime + if (operationMetric) { + incrementCountMetric(operationMetric.name, operationMetric.tags) + } + if (durationMetric) { + startTime = Date.now() + } try { - return cp.execFileSync(cmd, flags, options).toString().replace(/(\r\n|\n|\r)/gm, '') + const result = cp.execFileSync(cmd, flags, { stdio: 'pipe' }).toString().replace(/(\r\n|\n|\r)/gm, '') + if (durationMetric) { + distributionMetric(durationMetric.name, durationMetric.tags, Date.now() - startTime) + } + return result } catch (e) { + if (errorMetric) { + incrementCountMetric(errorMetric.name, { ...errorMetric.tags, exitCode: e.status }) + } log.error(e) return '' } diff --git a/packages/dd-trace/src/plugins/util/git.js b/packages/dd-trace/src/plugins/util/git.js index 72ca5db3b59..885cbe5fb3c 100644 --- a/packages/dd-trace/src/plugins/util/git.js +++ b/packages/dd-trace/src/plugins/util/git.js @@ -1,10 +1,9 @@ -const { execFileSync } = require('child_process') +const cp = require('child_process') const os = require('os') const path = require('path') const fs = require('fs') const log = require('../../log') -const { sanitizedExec } = require('./exec') const { GIT_COMMIT_SHA, GIT_BRANCH, @@ -19,10 +18,46 @@ const { GIT_COMMIT_AUTHOR_NAME, CI_WORKSPACE_PATH } = require('./tags') +const { + incrementCountMetric, + distributionMetric, + TELEMETRY_GIT_COMMAND, + TELEMETRY_GIT_COMMAND_MS, + TELEMETRY_GIT_COMMAND_ERRORS +} = require('../../ci-visibility/telemetry') const { filterSensitiveInfoFromRepository } = require('./url') const GIT_REV_LIST_MAX_BUFFER = 8 * 1024 * 1024 // 8MB +function sanitizedExec ( + cmd, + flags, + operationMetric, + durationMetric, + errorMetric +) { + let startTime + if (operationMetric) { + incrementCountMetric(operationMetric.name, operationMetric.tags) + } + if (durationMetric) { + startTime = Date.now() + } + try { + const result = cp.execFileSync(cmd, flags, { stdio: 'pipe' }).toString().replace(/(\r\n|\n|\r)/gm, '') + if (durationMetric) { + distributionMetric(durationMetric.name, durationMetric.tags, Date.now() - startTime) + } + return result + } catch (e) { + if (errorMetric) { + incrementCountMetric(errorMetric.name, { ...errorMetric.tags, exitCode: e.status }) + } + log.error(e) + return '' + } +} + function isDirectory (path) { try { const stats = fs.statSync(path) @@ -33,7 +68,13 @@ function isDirectory (path) { } function isShallowRepository () { - return sanitizedExec('git', ['rev-parse', '--is-shallow-repository']) === 'true' + return sanitizedExec( + 'git', + ['rev-parse', '--is-shallow-repository'], + { name: TELEMETRY_GIT_COMMAND, tags: { command: 'check_shallow' } }, + { name: TELEMETRY_GIT_COMMAND_MS, tags: { command: 'check_shallow' } }, + { name: TELEMETRY_GIT_COMMAND_ERRORS, tags: { command: 'check_shallow' } } + ) === 'true' } function getGitVersion () { @@ -72,50 +113,76 @@ function unshallowRepository () { defaultRemoteName ] + incrementCountMetric(TELEMETRY_GIT_COMMAND, { command: 'unshallow' }) + const start = Date.now() try { - execFileSync('git', [ + cp.execFileSync('git', [ ...baseGitOptions, revParseHead ], { stdio: 'pipe' }) - } 
catch (e) { + } catch (err) { // If the local HEAD is a commit that has not been pushed to the remote, the above command will fail. - log.error(e) + log.error(err) + incrementCountMetric(TELEMETRY_GIT_COMMAND_ERRORS, { command: 'unshallow', exitCode: err.status }) const upstreamRemote = sanitizedExec('git', ['rev-parse', '--abbrev-ref', '--symbolic-full-name', '@{upstream}']) try { - execFileSync('git', [ + cp.execFileSync('git', [ ...baseGitOptions, upstreamRemote ], { stdio: 'pipe' }) - } catch (e) { + } catch (err) { // If the CI is working on a detached HEAD or branch tracking hasn’t been set up, the above command will fail. - log.error(e) + log.error(err) + incrementCountMetric(TELEMETRY_GIT_COMMAND_ERRORS, { command: 'unshallow', exitCode: err.status }) // We use sanitizedExec here because if this last option fails, we'll give up. - sanitizedExec('git', baseGitOptions) + sanitizedExec( + 'git', + baseGitOptions, + null, + null, + { name: TELEMETRY_GIT_COMMAND_ERRORS, tags: { command: 'unshallow' } } // we log the error in sanitizedExec + ) } } + distributionMetric(TELEMETRY_GIT_COMMAND_MS, { command: 'unshallow' }, Date.now() - start) } function getRepositoryUrl () { - return sanitizedExec('git', ['config', '--get', 'remote.origin.url']) + return sanitizedExec( + 'git', + ['config', '--get', 'remote.origin.url'], + { name: TELEMETRY_GIT_COMMAND, tags: { command: 'get_repository' } }, + { name: TELEMETRY_GIT_COMMAND_MS, tags: { command: 'get_repository' } }, + { name: TELEMETRY_GIT_COMMAND_ERRORS, tags: { command: 'get_repository' } } + ) } function getLatestCommits () { + incrementCountMetric(TELEMETRY_GIT_COMMAND, { command: 'get_local_commits' }) + const startTime = Date.now() try { - return execFileSync('git', ['log', '--format=%H', '-n 1000', '--since="1 month ago"'], { stdio: 'pipe' }) + const result = cp.execFileSync('git', ['log', '--format=%H', '-n 1000', '--since="1 month ago"'], { stdio: 'pipe' }) .toString() .split('\n') .filter(commit => commit) + distributionMetric(TELEMETRY_GIT_COMMAND_MS, { command: 'get_local_commits' }, Date.now() - startTime) + return result } catch (err) { log.error(`Get latest commits failed: ${err.message}`) + incrementCountMetric(TELEMETRY_GIT_COMMAND_ERRORS, { command: 'get_local_commits', errorType: err.status }) return [] } } function getCommitsRevList (commitsToExclude, commitsToInclude) { + let result = [] + const commitsToExcludeString = commitsToExclude.map(commit => `^${commit}`) + incrementCountMetric(TELEMETRY_GIT_COMMAND, { command: 'get_objects' }) + const startTime = Date.now() try { - return execFileSync( + result = cp.execFileSync( 'git', [ 'rev-list', @@ -132,11 +199,14 @@ function getCommitsRevList (commitsToExclude, commitsToInclude) { .filter(commit => commit) } catch (err) { log.error(`Get commits to upload failed: ${err.message}`) - return [] + incrementCountMetric(TELEMETRY_GIT_COMMAND_ERRORS, { command: 'get_objects', errorType: err.status }) } + distributionMetric(TELEMETRY_GIT_COMMAND_MS, { command: 'get_objects' }, Date.now() - startTime) + return result } function generatePackFilesForCommits (commitsToUpload) { + let result = [] const tmpFolder = os.tmpdir() if (!isDirectory(tmpFolder)) { @@ -148,10 +218,12 @@ function generatePackFilesForCommits (commitsToUpload) { const temporaryPath = path.join(tmpFolder, randomPrefix) const cwdPath = path.join(process.cwd(), randomPrefix) + incrementCountMetric(TELEMETRY_GIT_COMMAND, { command: 'pack_objects' }) + const startTime = Date.now() // Generates pack files to upload and 
// returns the ordered list of packfiles' paths function execGitPackObjects (targetPath) { - return execFileSync( + return cp.execFileSync( 'git', [ 'pack-objects', @@ -164,9 +236,10 @@ function generatePackFilesForCommits (commitsToUpload) { } try { - return execGitPackObjects(temporaryPath) + result = execGitPackObjects(temporaryPath) } catch (err) { log.error(err) + incrementCountMetric(TELEMETRY_GIT_COMMAND_ERRORS, { command: 'pack_objects', errorType: err.status }) /** * The generation of pack files in the temporary folder (from `os.tmpdir()`) * sometimes fails in certain CI setups with the error message @@ -180,13 +253,15 @@ function generatePackFilesForCommits (commitsToUpload) { * TODO: fix issue and remove workaround. */ try { - return execGitPackObjects(cwdPath) + result = execGitPackObjects(cwdPath) } catch (err) { log.error(err) + incrementCountMetric(TELEMETRY_GIT_COMMAND_ERRORS, { command: 'pack_objects', errorType: err.status }) } - - return [] } + distributionMetric(TELEMETRY_GIT_COMMAND_MS, { command: 'pack_objects' }, Date.now() - startTime) + + return result } // If there is ciMetadata, it takes precedence. diff --git a/packages/dd-trace/src/telemetry/index.js b/packages/dd-trace/src/telemetry/index.js index a2db5e13971..f7a300309b6 100644 --- a/packages/dd-trace/src/telemetry/index.js +++ b/packages/dd-trace/src/telemetry/index.js @@ -129,6 +129,10 @@ function onBeforeExit () { process.removeListener('beforeExit', onBeforeExit) const { reqType, payload } = createPayload('app-closing') sendData(config, application, host, reqType, payload) + // we flush before shutting down. Only in CI Visibility + if (config.isCiVisibility) { + metricsManager.send(config, application, host) + } } function createAppObject (config) { diff --git a/packages/dd-trace/test/ci-visibility/exporters/agentless/coverage-writer.spec.js b/packages/dd-trace/test/ci-visibility/exporters/agentless/coverage-writer.spec.js index 7bfdd197866..62e10e9753e 100644 --- a/packages/dd-trace/test/ci-visibility/exporters/agentless/coverage-writer.spec.js +++ b/packages/dd-trace/test/ci-visibility/exporters/agentless/coverage-writer.spec.js @@ -23,7 +23,8 @@ describe('CI Visibility Coverage Writer', () => { count: sinon.stub().returns(0), makePayload: sinon.stub().returns({ getHeaders: () => ({}), - pipe: () => {} + pipe: () => {}, + size: () => 1 }) } @@ -80,7 +81,8 @@ describe('CI Visibility Coverage Writer', () => { encoder.count.returns(2) const payload = { getHeaders: () => ({}), - pipe: () => {} + pipe: () => {}, + size: () => 1 } encoder.makePayload.returns(payload) @@ -101,7 +103,8 @@ describe('CI Visibility Coverage Writer', () => { const payload = { getHeaders: () => ({}), - pipe: () => {} + pipe: () => {}, + size: () => 1 } encoder.count.returns(1) diff --git a/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js b/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js index f3d331be567..54c182dd7c1 100644 --- a/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js +++ b/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js @@ -3,6 +3,7 @@ require('../../../../dd-trace/test/setup/tap') const cp = require('child_process') +const fs = require('fs') const CiVisibilityExporter = require('../../../src/ci-visibility/exporters/ci-visibility-exporter') const nock = require('nock') @@ -13,6 +14,7 @@ describe('CI Visibility Exporter', () => { beforeEach(() => { // to make sure `isShallowRepository` in `git.js` returns 
false sinon.stub(cp, 'execFileSync').returns('false') + sinon.stub(fs, 'readFileSync').returns('') process.env.DD_API_KEY = '1' nock.cleanAll() }) diff --git a/packages/dd-trace/test/plugins/util/git.spec.js b/packages/dd-trace/test/plugins/util/git.spec.js index c04bd371dbb..7f275287678 100644 --- a/packages/dd-trace/test/plugins/util/git.spec.js +++ b/packages/dd-trace/test/plugins/util/git.spec.js @@ -9,7 +9,6 @@ const path = require('path') const { GIT_REV_LIST_MAX_BUFFER } = require('../../../src/plugins/util/git') const proxyquire = require('proxyquire') -const sanitizedExecStub = sinon.stub().returns('') const execFileSyncStub = sinon.stub().returns('') const { @@ -29,9 +28,6 @@ const { const { getGitMetadata, unshallowRepository } = proxyquire('../../../src/plugins/util/git', { - './exec': { - sanitizedExec: sanitizedExecStub - }, 'child_process': { execFileSync: execFileSyncStub } @@ -47,7 +43,7 @@ function getFakeDirectory () { describe('git', () => { afterEach(() => { - sanitizedExecStub.reset() + execFileSyncStub.reset() delete process.env.DD_GIT_COMMIT_SHA delete process.env.DD_GIT_REPOSITORY_URL delete process.env.DD_GIT_BRANCH @@ -80,15 +76,15 @@ describe('git', () => { } ) expect(metadata[GIT_REPOSITORY_URL]).not.to.equal('ciRepositoryUrl') - expect(sanitizedExecStub).to.have.been.calledWith('git', ['ls-remote', '--get-url']) - expect(sanitizedExecStub).to.have.been.calledWith('git', ['show', '-s', '--format=%an,%ae,%aI,%cn,%ce,%cI']) - expect(sanitizedExecStub).not.to.have.been.calledWith('git', ['show', '-s', '--format=%s']) - expect(sanitizedExecStub).not.to.have.been.calledWith('git', ['rev-parse', 'HEAD']) - expect(sanitizedExecStub).not.to.have.been.calledWith('git', ['rev-parse', '--abbrev-ref', 'HEAD']) - expect(sanitizedExecStub).not.to.have.been.calledWith('git', ['rev-parse', '--show-toplevel']) + expect(execFileSyncStub).to.have.been.calledWith('git', ['ls-remote', '--get-url']) + expect(execFileSyncStub).to.have.been.calledWith('git', ['show', '-s', '--format=%an,%ae,%aI,%cn,%ce,%cI']) + expect(execFileSyncStub).not.to.have.been.calledWith('git', ['show', '-s', '--format=%s']) + expect(execFileSyncStub).not.to.have.been.calledWith('git', ['rev-parse', 'HEAD']) + expect(execFileSyncStub).not.to.have.been.calledWith('git', ['rev-parse', '--abbrev-ref', 'HEAD']) + expect(execFileSyncStub).not.to.have.been.calledWith('git', ['rev-parse', '--show-toplevel']) }) it('does not crash if git is not available', () => { - sanitizedExecStub.returns('') + execFileSyncStub.returns('') const ciMetadata = { repositoryUrl: 'https://github.com/datadog/safe-repository.git' } const metadata = getGitMetadata(ciMetadata) expect(metadata).to.eql({ @@ -107,7 +103,7 @@ describe('git', () => { }) }) it('returns all git metadata is git is available', () => { - sanitizedExecStub + execFileSyncStub .onCall(0).returns( 'git author,git.author@email.com,2022-02-14T16:22:03-05:00,' + 'git committer,git.committer@email.com,2022-02-14T16:23:03-05:00' @@ -133,12 +129,12 @@ describe('git', () => { [GIT_COMMIT_COMMITTER_NAME]: 'git committer', [CI_WORKSPACE_PATH]: 'ciWorkspacePath' }) - expect(sanitizedExecStub).to.have.been.calledWith('git', ['ls-remote', '--get-url']) - expect(sanitizedExecStub).to.have.been.calledWith('git', ['show', '-s', '--format=%s']) - expect(sanitizedExecStub).to.have.been.calledWith('git', ['show', '-s', '--format=%an,%ae,%aI,%cn,%ce,%cI']) - expect(sanitizedExecStub).to.have.been.calledWith('git', ['rev-parse', 'HEAD']) - 
expect(sanitizedExecStub).to.have.been.calledWith('git', ['rev-parse', '--abbrev-ref', 'HEAD']) - expect(sanitizedExecStub).to.have.been.calledWith('git', ['rev-parse', '--show-toplevel']) + expect(execFileSyncStub).to.have.been.calledWith('git', ['ls-remote', '--get-url']) + expect(execFileSyncStub).to.have.been.calledWith('git', ['show', '-s', '--format=%s']) + expect(execFileSyncStub).to.have.been.calledWith('git', ['show', '-s', '--format=%an,%ae,%aI,%cn,%ce,%cI']) + expect(execFileSyncStub).to.have.been.calledWith('git', ['rev-parse', 'HEAD']) + expect(execFileSyncStub).to.have.been.calledWith('git', ['rev-parse', '--abbrev-ref', 'HEAD']) + expect(execFileSyncStub).to.have.been.calledWith('git', ['rev-parse', '--show-toplevel']) }) }) @@ -246,11 +242,10 @@ describe('generatePackFilesForCommits', () => { describe('unshallowRepository', () => { afterEach(() => { - sanitizedExecStub.reset() execFileSyncStub.reset() }) it('works for the usual case', () => { - sanitizedExecStub + execFileSyncStub .onCall(0).returns( 'git version 2.39.0' ) @@ -271,16 +266,14 @@ describe('unshallowRepository', () => { expect(execFileSyncStub).to.have.been.calledWith('git', options) }) it('works if the local HEAD is a commit that has not been pushed to the remote', () => { - sanitizedExecStub + execFileSyncStub .onCall(0).returns( 'git version 2.39.0' ) .onCall(1).returns('origin') .onCall(2).returns('daede5785233abb1a3cb76b9453d4eb5b98290b3') - .onCall(3).returns('origin/master') - - execFileSyncStub - .onCall(0).throws() + .onCall(3).throws() + .onCall(4).returns('origin/master') const options = [ 'fetch', @@ -296,17 +289,15 @@ describe('unshallowRepository', () => { expect(execFileSyncStub).to.have.been.calledWith('git', options) }) it('works if the CI is working on a detached HEAD or branch tracking hasn’t been set up', () => { - sanitizedExecStub + execFileSyncStub .onCall(0).returns( 'git version 2.39.0' ) .onCall(1).returns('origin') .onCall(2).returns('daede5785233abb1a3cb76b9453d4eb5b98290b3') - .onCall(3).returns('origin/master') - - execFileSyncStub - .onCall(0).throws() - .onCall(1).throws() + .onCall(3).throws() + .onCall(4).returns('origin/master') + .onCall(5).throws() const options = [ 'fetch', @@ -318,17 +309,17 @@ describe('unshallowRepository', () => { ] unshallowRepository() - expect(sanitizedExecStub).to.have.been.calledWith('git', options) + expect(execFileSyncStub).to.have.been.calledWith('git', options) }) }) describe('user credentials', () => { afterEach(() => { - sanitizedExecStub.reset() + execFileSyncStub.reset() execFileSyncStub.reset() }) it('scrubs https user credentials', () => { - sanitizedExecStub + execFileSyncStub .onCall(0).returns( 'git author,git.author@email.com,2022-02-14T16:22:03-05:00,' + 'git committer,git.committer@email.com,2022-02-14T16:23:03-05:00' @@ -340,7 +331,7 @@ describe('user credentials', () => { .to.equal('https://github.com/datadog/safe-repository.git') }) it('scrubs ssh user credentials', () => { - sanitizedExecStub + execFileSyncStub .onCall(0).returns( 'git author,git.author@email.com,2022-02-14T16:22:03-05:00,' + 'git committer,git.committer@email.com,2022-02-14T16:23:03-05:00' From 6cf4e71cc14e54e630e9797ac1aac285a56797c7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Fri, 22 Dec 2023 15:55:17 +0100 Subject: [PATCH 136/147] [ci-visibility] Fix cucumber plugin tests for node<16 (#3902) --- packages/datadog-plugin-cucumber/test/index.spec.js | 5 +++++ 1 file changed, 5 insertions(+) diff --git 
a/packages/datadog-plugin-cucumber/test/index.spec.js b/packages/datadog-plugin-cucumber/test/index.spec.js index 71023f58d0f..33e7c753baa 100644 --- a/packages/datadog-plugin-cucumber/test/index.spec.js +++ b/packages/datadog-plugin-cucumber/test/index.spec.js @@ -1,6 +1,7 @@ 'use strict' const path = require('path') const { PassThrough } = require('stream') +const semver = require('semver') const proxyquire = require('proxyquire').noPreserveCache() const nock = require('nock') @@ -23,6 +24,7 @@ const { TEST_SOURCE_START } = require('../../dd-trace/src/plugins/util/test') +const { NODE_MAJOR } = require('../../../version') const { version: ddTraceVersion } = require('../../../package.json') const runCucumber = (version, Cucumber, requireName, featureName, testName) => { @@ -54,6 +56,9 @@ describe('Plugin', function () { let Cucumber this.timeout(10000) withVersions('cucumber', '@cucumber/cucumber', version => { + const specificVersion = require(`../../../versions/@cucumber/cucumber@${version}`).version() + if ((NODE_MAJOR <= 16) && semver.satisfies(specificVersion, '>=10')) return + afterEach(() => { // > If you want to run tests multiple times, you may need to clear Node's require cache // before subsequent calls in whichever manner best suits your needs. From a8640d321e44e86af5016c88a00f5e5a3b835b69 Mon Sep 17 00:00:00 2001 From: Ayan Khan Date: Wed, 27 Dec 2023 15:00:57 -0500 Subject: [PATCH 137/147] add support for node specifiers (#3893) * add support for node specifiers --- .../src/child-process.js | 9 +- .../datadog-instrumentations/src/crypto.js | 3 +- packages/datadog-instrumentations/src/dns.js | 3 +- .../src/helpers/hooks.js | 9 +- .../src/helpers/instrument.js | 11 +- .../src/helpers/register.js | 20 +- .../src/http/client.js | 4 +- .../src/http/server.js | 11 +- .../src/http2/client.js | 4 +- .../src/http2/server.js | 4 +- packages/datadog-instrumentations/src/net.js | 12 +- .../datadog-plugin-dns/test/index.spec.js | 395 +++++++-------- .../test/integration-test/client.spec.js | 5 +- .../datadog-plugin-http/test/client.spec.js | 43 +- .../datadog-plugin-http/test/server.spec.js | 390 +++++++-------- .../datadog-plugin-http2/test/client.spec.js | 26 +- .../datadog-plugin-http2/test/server.spec.js | 320 ++++++------ .../datadog-plugin-net/test/index.spec.js | 454 +++++++++--------- packages/dd-trace/src/plugins/index.js | 5 + 19 files changed, 905 insertions(+), 823 deletions(-) diff --git a/packages/datadog-instrumentations/src/child-process.js b/packages/datadog-instrumentations/src/child-process.js index ba26dfdf7cf..3dca938ed42 100644 --- a/packages/datadog-instrumentations/src/child-process.js +++ b/packages/datadog-instrumentations/src/child-process.js @@ -9,11 +9,10 @@ const shimmer = require('../../datadog-shimmer') const childProcessChannel = channel('datadog:child_process:execution:start') const execMethods = ['exec', 'execFile', 'fork', 'spawn', 'execFileSync', 'execSync', 'spawnSync'] const names = ['child_process', 'node:child_process'] -names.forEach(name => { - addHook({ name }, childProcess => { - shimmer.massWrap(childProcess, execMethods, wrapChildProcessMethod()) - return childProcess - }) + +addHook({ name: names }, childProcess => { + shimmer.massWrap(childProcess, execMethods, wrapChildProcessMethod()) + return childProcess }) function wrapChildProcessMethod () { diff --git a/packages/datadog-instrumentations/src/crypto.js b/packages/datadog-instrumentations/src/crypto.js index 3113c16ef1d..7c95614cee7 100644 --- 
a/packages/datadog-instrumentations/src/crypto.js +++ b/packages/datadog-instrumentations/src/crypto.js @@ -11,8 +11,9 @@ const cryptoCipherCh = channel('datadog:crypto:cipher:start') const hashMethods = ['createHash', 'createHmac', 'createSign', 'createVerify', 'sign', 'verify'] const cipherMethods = ['createCipheriv', 'createDecipheriv'] +const names = ['crypto', 'node:crypto'] -addHook({ name: 'crypto' }, crypto => { +addHook({ name: names }, crypto => { shimmer.massWrap(crypto, hashMethods, wrapCryptoMethod(cryptoHashCh)) shimmer.massWrap(crypto, cipherMethods, wrapCryptoMethod(cryptoCipherCh)) return crypto diff --git a/packages/datadog-instrumentations/src/dns.js b/packages/datadog-instrumentations/src/dns.js index 7c4f18c22b7..de827ea5182 100644 --- a/packages/datadog-instrumentations/src/dns.js +++ b/packages/datadog-instrumentations/src/dns.js @@ -18,8 +18,9 @@ const rrtypes = { } const rrtypeMap = new WeakMap() +const names = ['dns', 'node:dns'] -addHook({ name: 'dns' }, dns => { +addHook({ name: names }, dns => { dns.lookup = wrap('apm:dns:lookup', dns.lookup, 2) dns.lookupService = wrap('apm:dns:lookup_service', dns.lookupService, 3) dns.resolve = wrap('apm:dns:resolve', dns.resolve, 2) diff --git a/packages/datadog-instrumentations/src/helpers/hooks.js b/packages/datadog-instrumentations/src/helpers/hooks.js index ad572e41090..702084b23a9 100644 --- a/packages/datadog-instrumentations/src/helpers/hooks.js +++ b/packages/datadog-instrumentations/src/helpers/hooks.js @@ -31,7 +31,6 @@ module.exports = { 'bunyan': () => require('../bunyan'), 'cassandra-driver': () => require('../cassandra-driver'), 'child_process': () => require('../child-process'), - 'node:child_process': () => require('../child-process'), 'connect': () => require('../connect'), 'cookie': () => require('../cookie'), 'cookie-parser': () => require('../cookie-parser'), @@ -45,7 +44,6 @@ module.exports = { 'fastify': () => require('../fastify'), 'find-my-way': () => require('../find-my-way'), 'fs': () => require('../fs'), - 'node:fs': () => require('../fs'), 'generic-pool': () => require('../generic-pool'), 'graphql': () => require('../graphql'), 'grpc': () => require('../grpc'), @@ -79,6 +77,13 @@ module.exports = { 'mysql2': () => require('../mysql2'), 'net': () => require('../net'), 'next': () => require('../next'), + 'node:child_process': () => require('../child-process'), + 'node:crypto': () => require('../crypto'), + 'node:dns': () => require('../dns'), + 'node:http': () => require('../http'), + 'node:http2': () => require('../http2'), + 'node:https': () => require('../http'), + 'node:net': () => require('../net'), 'oracledb': () => require('../oracledb'), 'openai': () => require('../openai'), 'paperplane': () => require('../paperplane'), diff --git a/packages/datadog-instrumentations/src/helpers/instrument.js b/packages/datadog-instrumentations/src/helpers/instrument.js index 323c6b01624..0ca8b63df48 100644 --- a/packages/datadog-instrumentations/src/helpers/instrument.js +++ b/packages/datadog-instrumentations/src/helpers/instrument.js @@ -21,11 +21,16 @@ exports.channel = function (name) { * @param Function hook */ exports.addHook = function addHook ({ name, versions, file }, hook) { - if (!instrumentations[name]) { - instrumentations[name] = [] + if (typeof name === 'string') { + name = [name] } - instrumentations[name].push({ name, versions, file, hook }) + for (const val of name) { + if (!instrumentations[val]) { + instrumentations[val] = [] + } + instrumentations[val].push({ name: val, versions, file, 
hook }) + } } // AsyncResource.bind exists and binds `this` properly only from 17.8.0 and up. diff --git a/packages/datadog-instrumentations/src/helpers/register.js b/packages/datadog-instrumentations/src/helpers/register.js index e89a91b55f2..9fc22ca45f1 100644 --- a/packages/datadog-instrumentations/src/helpers/register.js +++ b/packages/datadog-instrumentations/src/helpers/register.js @@ -24,6 +24,7 @@ if (!disabledInstrumentations.has('fetch')) { require('../fetch') } +const HOOK_SYMBOL = Symbol('hookExportsMap') // TODO: make this more efficient for (const packageName of names) { @@ -42,14 +43,29 @@ for (const packageName of names) { for (const { name, file, versions, hook } of instrumentations[packageName]) { const fullFilename = filename(name, file) + // Create a WeakMap associated with the hook function so that patches on the same moduleExport only happens once + // for example by instrumenting both dns and node:dns double the spans would be created + // since they both patch the same moduleExport, this WeakMap is used to mitigate that + if (!hook[HOOK_SYMBOL]) { + hook[HOOK_SYMBOL] = new WeakMap() + } + if (moduleName === fullFilename) { const version = moduleVersion || getVersion(moduleBaseDir) if (matchVersion(version, versions)) { + // Check if the hook already has a set moduleExport + if (hook[HOOK_SYMBOL].has(moduleExports)) { + return moduleExports + } + try { loadChannel.publish({ name, version, file }) - - moduleExports = hook(moduleExports, version) + // Send the name and version of the module back to the callback because now addHook + // takes in an array of names so by passing the name the callback will know which module name is being used + moduleExports = hook(moduleExports, version, name) + // Set the moduleExports in the hooks weakmap + hook[HOOK_SYMBOL].set(moduleExports, name) } catch (e) { log.error(e) } diff --git a/packages/datadog-instrumentations/src/http/client.js b/packages/datadog-instrumentations/src/http/client.js index fcf5cc05f0a..89e621e642d 100644 --- a/packages/datadog-instrumentations/src/http/client.js +++ b/packages/datadog-instrumentations/src/http/client.js @@ -14,9 +14,9 @@ const endChannel = channel('apm:http:client:request:end') const asyncStartChannel = channel('apm:http:client:request:asyncStart') const errorChannel = channel('apm:http:client:request:error') -addHook({ name: 'https' }, hookFn) +const names = ['http', 'https', 'node:http', 'node:https'] -addHook({ name: 'http' }, hookFn) +addHook({ name: names }, hookFn) function hookFn (http) { patch(http, 'request') diff --git a/packages/datadog-instrumentations/src/http/server.js b/packages/datadog-instrumentations/src/http/server.js index f3eb528214f..680e6b8dcbf 100644 --- a/packages/datadog-instrumentations/src/http/server.js +++ b/packages/datadog-instrumentations/src/http/server.js @@ -15,14 +15,17 @@ const finishSetHeaderCh = channel('datadog:http:server:response:set-header:finis const requestFinishedSet = new WeakSet() -addHook({ name: 'https' }, http => { - // http.ServerResponse not present on https +const httpNames = ['http', 'node:http'] +const httpsNames = ['https', 'node:https'] + +addHook({ name: httpNames }, http => { + shimmer.wrap(http.ServerResponse.prototype, 'emit', wrapResponseEmit) shimmer.wrap(http.Server.prototype, 'emit', wrapEmit) return http }) -addHook({ name: 'http' }, http => { - shimmer.wrap(http.ServerResponse.prototype, 'emit', wrapResponseEmit) +addHook({ name: httpsNames }, http => { + // http.ServerResponse not present on https 
shimmer.wrap(http.Server.prototype, 'emit', wrapEmit) return http }) diff --git a/packages/datadog-instrumentations/src/http2/client.js b/packages/datadog-instrumentations/src/http2/client.js index de4957318ae..651c9ed6edd 100644 --- a/packages/datadog-instrumentations/src/http2/client.js +++ b/packages/datadog-instrumentations/src/http2/client.js @@ -10,6 +10,8 @@ const asyncStartChannel = channel('apm:http2:client:request:asyncStart') const asyncEndChannel = channel('apm:http2:client:request:asyncEnd') const errorChannel = channel('apm:http2:client:request:error') +const names = ['http2', 'node:http2'] + function createWrapEmit (ctx) { return function wrapEmit (emit) { return function (event, arg1) { @@ -66,7 +68,7 @@ function wrapConnect (connect) { } } -addHook({ name: 'http2' }, http2 => { +addHook({ name: names }, http2 => { shimmer.wrap(http2, 'connect', wrapConnect) return http2 diff --git a/packages/datadog-instrumentations/src/http2/server.js b/packages/datadog-instrumentations/src/http2/server.js index 6c9a290c7a7..07bfa11e453 100644 --- a/packages/datadog-instrumentations/src/http2/server.js +++ b/packages/datadog-instrumentations/src/http2/server.js @@ -14,7 +14,9 @@ const startServerCh = channel('apm:http2:server:request:start') const errorServerCh = channel('apm:http2:server:request:error') const finishServerCh = channel('apm:http2:server:request:finish') -addHook({ name: 'http2' }, http2 => { +const names = ['http2', 'node:http2'] + +addHook({ name: names }, http2 => { shimmer.wrap(http2, 'createSecureServer', wrapCreateServer) shimmer.wrap(http2, 'createServer', wrapCreateServer) return http2 diff --git a/packages/datadog-instrumentations/src/net.js b/packages/datadog-instrumentations/src/net.js index a5de6f511ba..e2e6ecaefe7 100644 --- a/packages/datadog-instrumentations/src/net.js +++ b/packages/datadog-instrumentations/src/net.js @@ -17,8 +17,16 @@ const errorTCPCh = channel('apm:net:tcp:error') const connectionCh = channel(`apm:net:tcp:connection`) -addHook({ name: 'net' }, net => { - require('dns') +const names = ['net', 'node:net'] + +addHook({ name: names }, (net, version, name) => { + // explicitly require dns so that net gets an instrumented instance + // so that we don't miss the dns calls + if (name === 'net') { + require('dns') + } else { + require('node:dns') + } shimmer.wrap(net.Socket.prototype, 'connect', connect => function () { if (!startICPCh.hasSubscribers || !startTCPCh.hasSubscribers) { diff --git a/packages/datadog-plugin-dns/test/index.spec.js b/packages/datadog-plugin-dns/test/index.spec.js index 5b2ab06ecec..3550e06a059 100644 --- a/packages/datadog-plugin-dns/test/index.spec.js +++ b/packages/datadog-plugin-dns/test/index.spec.js @@ -5,233 +5,236 @@ const { promisify } = require('util') const { storage } = require('../../datadog-core') const { ERROR_TYPE, ERROR_MESSAGE } = require('../../dd-trace/src/constants') +const PLUGINS = ['dns', 'node:dns'] + describe('Plugin', () => { let dns let tracer + PLUGINS.forEach(plugin => { + describe(plugin, () => { + afterEach(() => { + return agent.close() + }) - describe('dns', () => { - afterEach(() => { - return agent.close() - }) - - beforeEach(() => { - return agent.load('dns') - .then(() => { - dns = require('dns') - tracer = require('../../dd-trace') - }) - }) - - it('should instrument lookup', done => { - agent - .use(traces => { - expect(traces[0][0]).to.deep.include({ - name: 'dns.lookup', - service: 'test', - resource: 'localhost' + beforeEach(() => { + return agent.load('dns') + .then(() => { + 
dns = require(plugin) + tracer = require('../../dd-trace') }) - expect(traces[0][0].meta).to.deep.include({ - 'component': 'dns', - 'span.kind': 'client', - 'dns.hostname': 'localhost', - 'dns.address': '127.0.0.1' - }) - }) - .then(done) - .catch(done) - - dns.lookup('localhost', 4, (err, address, family) => err && done(err)) - }) + }) - it('should instrument lookup with all addresses', done => { - agent - .use(traces => { - expect(traces[0][0]).to.deep.include({ - name: 'dns.lookup', - service: 'test', - resource: 'localhost' - }) - expect(traces[0][0].meta).to.deep.include({ - 'component': 'dns', - 'span.kind': 'client', - 'dns.hostname': 'localhost', - 'dns.address': '127.0.0.1', - 'dns.addresses': '127.0.0.1,::1' - }) - }) - .then(done) - .catch(done) + it('should instrument lookup', done => { + agent + .use(traces => { + expect(traces[0][0]).to.deep.include({ + name: 'dns.lookup', + service: 'test', + resource: 'localhost' + }) + expect(traces[0][0].meta).to.deep.include({ + 'component': 'dns', + 'span.kind': 'client', + 'dns.hostname': 'localhost', + 'dns.address': '127.0.0.1' + }) + }) + .then(done) + .catch(done) + + dns.lookup('localhost', 4, (err, address, family) => err && done(err)) + }) - dns.lookup('localhost', { all: true }, (err, address, family) => err && done(err)) - }) + it('should instrument lookup with all addresses', done => { + agent + .use(traces => { + expect(traces[0][0]).to.deep.include({ + name: 'dns.lookup', + service: 'test', + resource: 'localhost' + }) + expect(traces[0][0].meta).to.deep.include({ + 'component': 'dns', + 'span.kind': 'client', + 'dns.hostname': 'localhost', + 'dns.address': '127.0.0.1', + 'dns.addresses': '127.0.0.1,::1' + }) + }) + .then(done) + .catch(done) + + dns.lookup('localhost', { all: true }, (err, address, family) => err && done(err)) + }) - it('should instrument errors correctly', done => { - agent - .use(traces => { - expect(traces[0][0]).to.deep.include({ - name: 'dns.lookup', - service: 'test', - resource: 'fakedomain.faketld', - error: 1 - }) - expect(traces[0][0].meta).to.deep.include({ - 'component': 'dns', - 'span.kind': 'client', - 'dns.hostname': 'fakedomain.faketld', - [ERROR_TYPE]: 'Error', - [ERROR_MESSAGE]: 'getaddrinfo ENOTFOUND fakedomain.faketld' - }) + it('should instrument errors correctly', done => { + agent + .use(traces => { + expect(traces[0][0]).to.deep.include({ + name: 'dns.lookup', + service: 'test', + resource: 'fakedomain.faketld', + error: 1 + }) + expect(traces[0][0].meta).to.deep.include({ + 'component': 'dns', + 'span.kind': 'client', + 'dns.hostname': 'fakedomain.faketld', + [ERROR_TYPE]: 'Error', + [ERROR_MESSAGE]: 'getaddrinfo ENOTFOUND fakedomain.faketld' + }) + }) + .then(done) + .catch(done) + + dns.lookup('fakedomain.faketld', 4, (err, address, family) => { + expect(err).to.not.be.null }) - .then(done) - .catch(done) + }) - dns.lookup('fakedomain.faketld', 4, (err, address, family) => { - expect(err).to.not.be.null + it('should instrument lookupService', done => { + agent + .use(traces => { + expect(traces[0][0]).to.deep.include({ + name: 'dns.lookup_service', + service: 'test', + resource: '127.0.0.1:22' + }) + expect(traces[0][0].meta).to.deep.include({ + 'component': 'dns', + 'span.kind': 'client', + 'dns.address': '127.0.0.1' + }) + expect(traces[0][0].metrics).to.deep.include({ + 'dns.port': 22 + }) + }) + .then(done) + .catch(done) + + dns.lookupService('127.0.0.1', 22, err => err && done(err)) }) - }) - it('should instrument lookupService', done => { - agent - .use(traces => { - 
expect(traces[0][0]).to.deep.include({ - name: 'dns.lookup_service', - service: 'test', - resource: '127.0.0.1:22' - }) - expect(traces[0][0].meta).to.deep.include({ - 'component': 'dns', - 'span.kind': 'client', - 'dns.address': '127.0.0.1' - }) - expect(traces[0][0].metrics).to.deep.include({ - 'dns.port': 22 - }) - }) - .then(done) - .catch(done) + it('should instrument resolve', done => { + agent + .use(traces => { + expect(traces[0][0]).to.deep.include({ + name: 'dns.resolve', + service: 'test', + resource: 'A lvh.me' + }) + expect(traces[0][0].meta).to.deep.include({ + 'component': 'dns', + 'span.kind': 'client', + 'dns.hostname': 'lvh.me', + 'dns.rrtype': 'A' + }) + }) + .then(done) + .catch(done) + + dns.resolve('lvh.me', err => err && done(err)) + }) - dns.lookupService('127.0.0.1', 22, err => err && done(err)) - }) + it('should instrument resolve shorthands', done => { + agent + .use(traces => { + expect(traces[0][0]).to.deep.include({ + name: 'dns.resolve', + service: 'test', + resource: 'ANY lvh.me' + }) + expect(traces[0][0].meta).to.deep.include({ + 'component': 'dns', + 'span.kind': 'client', + 'dns.hostname': 'lvh.me', + 'dns.rrtype': 'ANY' + }) + }) + .then(done) + .catch(done) + + dns.resolveAny('lvh.me', err => err && done(err)) + }) - it('should instrument resolve', done => { - agent - .use(traces => { - expect(traces[0][0]).to.deep.include({ - name: 'dns.resolve', - service: 'test', - resource: 'A lvh.me' - }) - expect(traces[0][0].meta).to.deep.include({ - 'component': 'dns', - 'span.kind': 'client', - 'dns.hostname': 'lvh.me', - 'dns.rrtype': 'A' - }) - }) - .then(done) - .catch(done) + it('should instrument reverse', done => { + agent + .use(traces => { + expect(traces[0][0]).to.deep.include({ + name: 'dns.reverse', + service: 'test', + resource: '127.0.0.1' + }) + expect(traces[0][0].meta).to.deep.include({ + 'component': 'dns', + 'span.kind': 'client', + 'dns.ip': '127.0.0.1' + }) + }) + .then(done) + .catch(done) + + dns.reverse('127.0.0.1', err => err && done(err)) + }) - dns.resolve('lvh.me', err => err && done(err)) - }) + it('should preserve the parent scope in the callback', done => { + const span = tracer.startSpan('dummySpan', {}) - it('should instrument resolve shorthands', done => { - agent - .use(traces => { - expect(traces[0][0]).to.deep.include({ - name: 'dns.resolve', - service: 'test', - resource: 'ANY lvh.me' - }) - expect(traces[0][0].meta).to.deep.include({ - 'component': 'dns', - 'span.kind': 'client', - 'dns.hostname': 'lvh.me', - 'dns.rrtype': 'ANY' - }) - }) - .then(done) - .catch(done) + tracer.scope().activate(span, () => { + dns.lookup('localhost', 4, (err) => { + if (err) return done(err) - dns.resolveAny('lvh.me', err => err && done(err)) - }) + expect(tracer.scope().active()).to.equal(span) - it('should instrument reverse', done => { - agent - .use(traces => { - expect(traces[0][0]).to.deep.include({ - name: 'dns.reverse', - service: 'test', - resource: '127.0.0.1' - }) - expect(traces[0][0].meta).to.deep.include({ - 'component': 'dns', - 'span.kind': 'client', - 'dns.ip': '127.0.0.1' + done() }) }) - .then(done) - .catch(done) - - dns.reverse('127.0.0.1', err => err && done(err)) - }) - - it('should preserve the parent scope in the callback', done => { - const span = tracer.startSpan('dummySpan', {}) - - tracer.scope().activate(span, () => { - dns.lookup('localhost', 4, (err) => { - if (err) return done(err) + }) - expect(tracer.scope().active()).to.equal(span) + it('should work with promisify', () => { + const lookup = 
promisify(dns.lookup) - done() + return lookup('localhost', 4).then(({ address, family }) => { + expect(address).to.equal('127.0.0.1') + expect(family).to.equal(4) }) }) - }) - - it('should work with promisify', () => { - const lookup = promisify(dns.lookup) - return lookup('localhost', 4).then(({ address, family }) => { - expect(address).to.equal('127.0.0.1') - expect(family).to.equal(4) + it('should instrument Resolver', done => { + const resolver = new dns.Resolver() + + agent + .use(traces => { + expect(traces[0][0]).to.deep.include({ + name: 'dns.resolve', + service: 'test', + resource: 'A lvh.me' + }) + expect(traces[0][0].meta).to.deep.include({ + 'component': 'dns', + 'dns.hostname': 'lvh.me', + 'dns.rrtype': 'A' + }) + }) + .then(done) + .catch(done) + + resolver.resolve('lvh.me', err => err && done(err)) }) - }) - it('should instrument Resolver', done => { - const resolver = new dns.Resolver() + it('should skip instrumentation for noop context', done => { + const resolver = new dns.Resolver() + const timer = setTimeout(done, 200) - agent - .use(traces => { - expect(traces[0][0]).to.deep.include({ - name: 'dns.resolve', - service: 'test', - resource: 'A lvh.me' + agent + .use(() => { + done(new Error('Resolve was traced.')) + clearTimeout(timer) }) - expect(traces[0][0].meta).to.deep.include({ - 'component': 'dns', - 'dns.hostname': 'lvh.me', - 'dns.rrtype': 'A' - }) - }) - .then(done) - .catch(done) - - resolver.resolve('lvh.me', err => err && done(err)) - }) - - it('should skip instrumentation for noop context', done => { - const resolver = new dns.Resolver() - const timer = setTimeout(done, 200) - agent - .use(() => { - done(new Error('Resolve was traced.')) - clearTimeout(timer) + storage.run({ noop: true }, () => { + resolver.resolve('lvh.me', () => {}) }) - - storage.run({ noop: true }, () => { - resolver.resolve('lvh.me', () => {}) }) }) }) diff --git a/packages/datadog-plugin-fastify/test/integration-test/client.spec.js b/packages/datadog-plugin-fastify/test/integration-test/client.spec.js index 4dce20e0255..581f512305b 100644 --- a/packages/datadog-plugin-fastify/test/integration-test/client.spec.js +++ b/packages/datadog-plugin-fastify/test/integration-test/client.spec.js @@ -14,9 +14,8 @@ describe('esm', () => { let proc let sandbox - // TODO: fastify instrumentation breaks with esm for version 4.23.2 but works for commonJS, - // fix it and change the versions tested - withVersions('fastify', 'fastify', '^3', version => { + // skip older versions of fastify due to syntax differences + withVersions('fastify', 'fastify', '>=3', version => { before(async function () { this.timeout(20000) sandbox = await createSandbox([`'fastify@${version}'`], false, diff --git a/packages/datadog-plugin-http/test/client.spec.js b/packages/datadog-plugin-http/test/client.spec.js index 7256950ac83..5a48959892e 100644 --- a/packages/datadog-plugin-http/test/client.spec.js +++ b/packages/datadog-plugin-http/test/client.spec.js @@ -25,15 +25,21 @@ describe('Plugin', () => { let appListener let tracer - ['http', 'https'].forEach(protocol => { - describe(protocol, () => { + ['http', 'https', 'node:http', 'node:https'].forEach(pluginToBeLoaded => { + const protocol = pluginToBeLoaded.split(':')[1] || pluginToBeLoaded + describe(pluginToBeLoaded, () => { function server (app, port, listener) { let server - if (protocol === 'https') { + if (pluginToBeLoaded === 'https') { process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0' server = require('https').createServer({ key, cert }, app) - } else { + } else if 
(pluginToBeLoaded === 'node:https') { + process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0' + server = require('node:https').createServer({ key, cert }, app) + } else if (pluginToBeLoaded === 'http') { server = require('http').createServer(app) + } else { + server = require('node:http').createServer(app) } server.listen(port, 'localhost', listener) return server @@ -55,7 +61,7 @@ describe('Plugin', () => { beforeEach(() => { return agent.load('http', { server: false }) .then(() => { - http = require(protocol) + http = require(pluginToBeLoaded) express = require('express') }) }) @@ -904,8 +910,13 @@ describe('Plugin', () => { it('should only record a request once', done => { // Make sure both plugins are loaded, which could cause double-counting. - require('http') - require('https') + if (pluginToBeLoaded.includes('node:')) { + require('node:http') + require('node:https') + } else { + require('http') + require('https') + } const app = express() @@ -1072,7 +1083,7 @@ describe('Plugin', () => { ch = require('dc-polyfill').channel('apm:http:client:request:start') sub = () => {} tracer = require('../../dd-trace') - http = require(protocol) + http = require(pluginToBeLoaded) }) }) @@ -1119,7 +1130,7 @@ describe('Plugin', () => { return agent.load('http', config) .then(() => { - http = require(protocol) + http = require(pluginToBeLoaded) express = require('express') }) }) @@ -1160,7 +1171,7 @@ describe('Plugin', () => { return agent.load('http', config) .then(() => { - http = require(protocol) + http = require(pluginToBeLoaded) express = require('express') }) }) @@ -1209,7 +1220,7 @@ describe('Plugin', () => { return agent.load('http', config) .then(() => { - http = require(protocol) + http = require(pluginToBeLoaded) express = require('express') }) }) @@ -1254,7 +1265,7 @@ describe('Plugin', () => { return agent.load('http', config) .then(() => { - http = require(protocol) + http = require(pluginToBeLoaded) express = require('express') }) }) @@ -1326,7 +1337,7 @@ describe('Plugin', () => { return agent.load('http', config) .then(() => { - http = require(protocol) + http = require(pluginToBeLoaded) express = require('express') }) }) @@ -1439,7 +1450,7 @@ describe('Plugin', () => { return agent.load('http', config) .then(() => { - http = require(protocol) + http = require(pluginToBeLoaded) express = require('express') }) }) @@ -1485,7 +1496,7 @@ describe('Plugin', () => { return agent.load('http', config) .then(() => { - http = require(protocol) + http = require(pluginToBeLoaded) express = require('express') }) }) @@ -1532,7 +1543,7 @@ describe('Plugin', () => { return agent.load('http', config) .then(() => { - http = require(protocol) + http = require(pluginToBeLoaded) express = require('express') }) }) diff --git a/packages/datadog-plugin-http/test/server.spec.js b/packages/datadog-plugin-http/test/server.spec.js index 9a0135ea967..4f6f5a15ed3 100644 --- a/packages/datadog-plugin-http/test/server.spec.js +++ b/packages/datadog-plugin-http/test/server.spec.js @@ -15,248 +15,250 @@ describe('Plugin', () => { let port let app - describe('http/server', () => { - beforeEach(() => { - tracer = require('../../dd-trace') - listener = (req, res) => { - app && app(req, res) - res.writeHead(200) - res.end() - } - }) - - beforeEach(() => { - return getPort().then(newPort => { - port = newPort - }) - }) - - afterEach(() => { - appListener && appListener.close() - app = null - return agent.close({ ritmReset: false }) - }) - - describe('canceled request', () => { + ['http', 'node:http'].forEach(pluginToBeLoaded => 
{ + describe(`${pluginToBeLoaded}/server`, () => { beforeEach(() => { + tracer = require('../../dd-trace') listener = (req, res) => { - setTimeout(() => { - app && app(req, res) - res.writeHead(200) - res.end() - }, 500) + app && app(req, res) + res.writeHead(200) + res.end() } }) beforeEach(() => { - return agent.load('http') - .then(() => { - http = require('http') - }) - }) - - beforeEach(done => { - const server = new http.Server(listener) - appListener = server - .listen(port, 'localhost', () => done()) + return getPort().then(newPort => { + port = newPort + }) }) - it('should send traces to agent', (done) => { - app = sinon.stub() - agent - .use(traces => { - expect(app).not.to.have.been.called // request should be cancelled before call to app - expect(traces[0][0]).to.have.property('name', 'web.request') - expect(traces[0][0]).to.have.property('service', 'test') - expect(traces[0][0]).to.have.property('type', 'web') - expect(traces[0][0]).to.have.property('resource', 'GET') - expect(traces[0][0].meta).to.have.property('span.kind', 'server') - expect(traces[0][0].meta).to.have.property('http.url', `http://localhost:${port}/user`) - expect(traces[0][0].meta).to.have.property('http.method', 'GET') - expect(traces[0][0].meta).to.have.property('http.status_code', '200') - expect(traces[0][0].meta).to.have.property('component', 'http') - }) - .then(done) - .catch(done) - const source = axios.CancelToken.source() - axios.get(`http://localhost:${port}/user`, { cancelToken: source.token }) - .then(() => {}) - setTimeout(() => { source.cancel() }, 100) + afterEach(() => { + appListener && appListener.close() + app = null + return agent.close({ ritmReset: false }) }) - }) - describe('without configuration', () => { - beforeEach(() => { - return agent.load('http') - .then(() => { - http = require('http') - }) - }) + describe('canceled request', () => { + beforeEach(() => { + listener = (req, res) => { + setTimeout(() => { + app && app(req, res) + res.writeHead(200) + res.end() + }, 500) + } + }) - beforeEach(done => { - const server = new http.Server(listener) - appListener = server - .listen(port, 'localhost', () => done()) - }) + beforeEach(() => { + return agent.load('http') + .then(() => { + http = require(pluginToBeLoaded) + }) + }) - withNamingSchema( - done => { - axios.get(`http://localhost:${port}/user`).catch(done) - }, - rawExpectedSchema.server - ) - - it('should do automatic instrumentation', done => { - agent - .use(traces => { - expect(traces[0][0]).to.have.property('name', 'web.request') - expect(traces[0][0]).to.have.property('service', 'test') - expect(traces[0][0]).to.have.property('type', 'web') - expect(traces[0][0]).to.have.property('resource', 'GET') - expect(traces[0][0].meta).to.have.property('span.kind', 'server') - expect(traces[0][0].meta).to.have.property('http.url', `http://localhost:${port}/user`) - expect(traces[0][0].meta).to.have.property('http.method', 'GET') - expect(traces[0][0].meta).to.have.property('http.status_code', '200') - expect(traces[0][0].meta).to.have.property('component', 'http') - }) - .then(done) - .catch(done) + beforeEach(done => { + const server = new http.Server(listener) + appListener = server + .listen(port, 'localhost', () => done()) + }) - axios.get(`http://localhost:${port}/user`).catch(done) + it('should send traces to agent', (done) => { + app = sinon.stub() + agent + .use(traces => { + expect(app).not.to.have.been.called // request should be cancelled before call to app + expect(traces[0][0]).to.have.property('name', 'web.request') 
+ expect(traces[0][0]).to.have.property('service', 'test') + expect(traces[0][0]).to.have.property('type', 'web') + expect(traces[0][0]).to.have.property('resource', 'GET') + expect(traces[0][0].meta).to.have.property('span.kind', 'server') + expect(traces[0][0].meta).to.have.property('http.url', `http://localhost:${port}/user`) + expect(traces[0][0].meta).to.have.property('http.method', 'GET') + expect(traces[0][0].meta).to.have.property('http.status_code', '200') + expect(traces[0][0].meta).to.have.property('component', 'http') + }) + .then(done) + .catch(done) + const source = axios.CancelToken.source() + axios.get(`http://localhost:${port}/user`, { cancelToken: source.token }) + .then(() => {}) + setTimeout(() => { source.cancel() }, 100) + }) }) - it('should run the request listener in the request scope', done => { - const spy = sinon.spy(() => { - expect(tracer.scope().active()).to.not.be.null + describe('without configuration', () => { + beforeEach(() => { + return agent.load('http') + .then(() => { + http = require(pluginToBeLoaded) + }) }) - incomingHttpRequestStart.subscribe(spy) + beforeEach(done => { + const server = new http.Server(listener) + appListener = server + .listen(port, 'localhost', () => done()) + }) - app = (req, res) => { - expect(tracer.scope().active()).to.not.be.null + withNamingSchema( + done => { + axios.get(`http://localhost:${port}/user`).catch(done) + }, + rawExpectedSchema.server + ) + + it('should do automatic instrumentation', done => { + agent + .use(traces => { + expect(traces[0][0]).to.have.property('name', 'web.request') + expect(traces[0][0]).to.have.property('service', 'test') + expect(traces[0][0]).to.have.property('type', 'web') + expect(traces[0][0]).to.have.property('resource', 'GET') + expect(traces[0][0].meta).to.have.property('span.kind', 'server') + expect(traces[0][0].meta).to.have.property('http.url', `http://localhost:${port}/user`) + expect(traces[0][0].meta).to.have.property('http.method', 'GET') + expect(traces[0][0].meta).to.have.property('http.status_code', '200') + expect(traces[0][0].meta).to.have.property('component', 'http') + }) + .then(done) + .catch(done) - const abortController = new AbortController() - expect(spy).to.have.been.calledOnceWithExactly({ req, res, abortController }, incomingHttpRequestStart.name) + axios.get(`http://localhost:${port}/user`).catch(done) + }) - done() - } + it('should run the request listener in the request scope', done => { + const spy = sinon.spy(() => { + expect(tracer.scope().active()).to.not.be.null + }) - axios.get(`http://localhost:${port}/user`).catch(done) - }) + incomingHttpRequestStart.subscribe(spy) + + app = (req, res) => { + expect(tracer.scope().active()).to.not.be.null + + const abortController = new AbortController() + expect(spy).to.have.been.calledOnceWithExactly({ req, res, abortController }, incomingHttpRequestStart.name) - it(`should run the request's close event in the correct context`, done => { - app = (req, res) => { - req.on('close', () => { - expect(tracer.scope().active()).to.equal(null) done() - }) - } + } - axios.get(`http://localhost:${port}/user`).catch(done) - }) + axios.get(`http://localhost:${port}/user`).catch(done) + }) - it(`should run the response's close event in the correct context`, done => { - app = (req, res) => { - const span = tracer.scope().active() + it(`should run the request's close event in the correct context`, done => { + app = (req, res) => { + req.on('close', () => { + expect(tracer.scope().active()).to.equal(null) + done() + }) + } - 
res.on('close', () => { - expect(tracer.scope().active()).to.equal(span) - done() - }) - } + axios.get(`http://localhost:${port}/user`).catch(done) + }) - axios.get(`http://localhost:${port}/user`).catch(done) - }) + it(`should run the response's close event in the correct context`, done => { + app = (req, res) => { + const span = tracer.scope().active() - it(`should run the finish event in the correct context`, done => { - app = (req, res) => { - const span = tracer.scope().active() + res.on('close', () => { + expect(tracer.scope().active()).to.equal(span) + done() + }) + } - res.on('finish', () => { - expect(tracer.scope().active()).to.equal(span) - done() - }) - } + axios.get(`http://localhost:${port}/user`).catch(done) + }) - axios.get(`http://localhost:${port}/user`).catch(done) - }) + it(`should run the finish event in the correct context`, done => { + app = (req, res) => { + const span = tracer.scope().active() - it('should not instrument manually instantiated server responses', () => { - const { IncomingMessage, ServerResponse } = http + res.on('finish', () => { + expect(tracer.scope().active()).to.equal(span) + done() + }) + } - const req = new IncomingMessage() - const res = new ServerResponse(req) + axios.get(`http://localhost:${port}/user`).catch(done) + }) - expect(() => res.emit('finish')).to.not.throw() - }) + it('should not instrument manually instantiated server responses', () => { + const { IncomingMessage, ServerResponse } = http - it('should not cause `end` to be called multiple times', done => { - app = (req, res) => { - res.end = sinon.spy(res.end) + const req = new IncomingMessage() + const res = new ServerResponse(req) - res.on('finish', () => { - expect(res.end).to.have.been.calledOnce - done() - }) - } + expect(() => res.emit('finish')).to.not.throw() + }) - axios.get(`http://localhost:${port}/user`).catch(done) - }) - }) + it('should not cause `end` to be called multiple times', done => { + app = (req, res) => { + res.end = sinon.spy(res.end) - describe('with a `server` configuration', () => { - beforeEach(() => { - return agent.load('http', { client: false, server: {} }) - .then(() => { - http = require('http') - }) - }) + res.on('finish', () => { + expect(res.end).to.have.been.calledOnce + done() + }) + } - beforeEach(done => { - const server = new http.Server(listener) - appListener = server - .listen(port, 'localhost', () => done()) + axios.get(`http://localhost:${port}/user`).catch(done) + }) }) - // see https://github.com/DataDog/dd-trace-js/issues/2453 - it('should not have disabled tracing', (done) => { - agent.use(() => {}) - .then(done) - .catch(done) + describe('with a `server` configuration', () => { + beforeEach(() => { + return agent.load('http', { client: false, server: {} }) + .then(() => { + http = require(pluginToBeLoaded) + }) + }) - axios.get(`http://localhost:${port}/user`).catch(done) - }) - }) + beforeEach(done => { + const server = new http.Server(listener) + appListener = server + .listen(port, 'localhost', () => done()) + }) - describe('with a blocklist configuration', () => { - beforeEach(() => { - return agent.load('http', { client: false, blocklist: '/health' }) - .then(() => { - http = require('http') - }) - }) + // see https://github.com/DataDog/dd-trace-js/issues/2453 + it('should not have disabled tracing', (done) => { + agent.use(() => {}) + .then(done) + .catch(done) - beforeEach(done => { - const server = new http.Server(listener) - appListener = server - .listen(port, 'localhost', () => done()) + 
axios.get(`http://localhost:${port}/user`).catch(done) + }) }) - it('should drop traces for blocklist route', done => { - const spy = sinon.spy(() => {}) + describe('with a blocklist configuration', () => { + beforeEach(() => { + return agent.load('http', { client: false, blocklist: '/health' }) + .then(() => { + http = require(pluginToBeLoaded) + }) + }) - agent - .use((traces) => { - spy() - }) - .catch(done) + beforeEach(done => { + const server = new http.Server(listener) + appListener = server + .listen(port, 'localhost', () => done()) + }) - setTimeout(() => { - expect(spy).to.not.have.been.called - done() - }, 100) + it('should drop traces for blocklist route', done => { + const spy = sinon.spy(() => {}) - axios.get(`http://localhost:${port}/health`).catch(done) + agent + .use((traces) => { + spy() + }) + .catch(done) + + setTimeout(() => { + expect(spy).to.not.have.been.called + done() + }, 100) + + axios.get(`http://localhost:${port}/health`).catch(done) + }) }) }) }) diff --git a/packages/datadog-plugin-http2/test/client.spec.js b/packages/datadog-plugin-http2/test/client.spec.js index 89ec4cb1ab3..7877be9c427 100644 --- a/packages/datadog-plugin-http2/test/client.spec.js +++ b/packages/datadog-plugin-http2/test/client.spec.js @@ -20,15 +20,17 @@ describe('Plugin', () => { let appListener let tracer - ['http', 'https'].forEach(protocol => { - describe(`http2/client, protocol ${protocol}`, () => { + ['http', 'https', 'node:http', 'node:https'].forEach(pluginToBeLoaded => { + const protocol = pluginToBeLoaded.split(':')[1] || pluginToBeLoaded + const loadPlugin = pluginToBeLoaded.includes('node:') ? 'node:http2' : 'http2' + describe(`http2/client, protocol ${pluginToBeLoaded}`, () => { function server (app, port, listener) { let server - if (protocol === 'https') { + if (pluginToBeLoaded === 'https' || pluginToBeLoaded === 'node:https') { process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0' - server = require('http2').createSecureServer({ key, cert }) + server = require(loadPlugin).createSecureServer({ key, cert }) } else { - server = require('http2').createServer() + server = require(loadPlugin).createServer() } server.on('stream', app) server.listen(port, 'localhost', listener) @@ -51,7 +53,7 @@ describe('Plugin', () => { beforeEach(() => { return agent.load('http2', { server: false }) .then(() => { - http2 = require('http2') + http2 = require(loadPlugin) }) }) @@ -629,7 +631,7 @@ describe('Plugin', () => { }) it('should only record a request once', done => { - require('http2') + require(loadPlugin) const app = (stream, headers) => { stream.respond({ ':status': 200 @@ -682,7 +684,7 @@ describe('Plugin', () => { return agent.load('http2', config) .then(() => { - http2 = require('http2') + http2 = require(loadPlugin) }) }) @@ -729,7 +731,7 @@ describe('Plugin', () => { return agent.load('http2', config) .then(() => { - http2 = require('http2') + http2 = require(loadPlugin) }) }) @@ -777,7 +779,7 @@ describe('Plugin', () => { return agent.load('http2', config) .then(() => { - http2 = require('http2') + http2 = require(loadPlugin) }) }) @@ -856,7 +858,7 @@ describe('Plugin', () => { return agent.load('http2', config) .then(() => { - http2 = require('http2') + http2 = require(loadPlugin) }) }) @@ -905,7 +907,7 @@ describe('Plugin', () => { return agent.load('http2', config) .then(() => { - http2 = require('http2') + http2 = require(loadPlugin) }) }) diff --git a/packages/datadog-plugin-http2/test/server.spec.js b/packages/datadog-plugin-http2/test/server.spec.js index 
47e54c2a29e..2c2be7175bb 100644 --- a/packages/datadog-plugin-http2/test/server.spec.js +++ b/packages/datadog-plugin-http2/test/server.spec.js @@ -51,201 +51,203 @@ describe('Plugin', () => { let port let app - describe('http2/server', () => { - beforeEach(() => { - tracer = require('../../dd-trace') - listener = (req, res) => { - app && app(req, res) - res.writeHead(200) - res.end() - } - }) - - beforeEach(() => { - return getPort().then(newPort => { - port = newPort - }) - }) - - afterEach(() => { - appListener && appListener.close() - app = null - return agent.close({ ritmReset: false }) - }) - - describe('cancelled request', () => { + ['http2', 'node:http2'].forEach(pluginToBeLoaded => { + describe(`${pluginToBeLoaded}/server`, () => { beforeEach(() => { + tracer = require('../../dd-trace') listener = (req, res) => { - setTimeout(() => { - app && app(req, res) - res.writeHead(200) - res.end() - }, 500) + app && app(req, res) + res.writeHead(200) + res.end() } }) beforeEach(() => { - return agent.load('http2') - .then(() => { - http2 = require('http2') - }) + return getPort().then(newPort => { + port = newPort + }) }) - beforeEach(done => { - const server = http2.createServer(listener) - appListener = server - .listen(port, 'localhost', () => done()) + afterEach(() => { + appListener && appListener.close() + app = null + return agent.close({ ritmReset: false }) }) - it('should send traces to agent', (done) => { - app = sinon.stub() - agent - .use(traces => { - expect(app).not.to.have.been.called // request should be cancelled before call to app - expect(traces[0][0]).to.have.property('name', 'web.request') - expect(traces[0][0]).to.have.property('service', 'test') - expect(traces[0][0]).to.have.property('type', 'web') - expect(traces[0][0]).to.have.property('resource', 'GET') - expect(traces[0][0].meta).to.have.property('span.kind', 'server') - expect(traces[0][0].meta).to.have.property('http.url', `http://localhost:${port}/user`) - expect(traces[0][0].meta).to.have.property('http.method', 'GET') - expect(traces[0][0].meta).to.have.property('http.status_code', '200') - expect(traces[0][0].meta).to.have.property('component', 'http2') - }) - .then(done) - .catch(done) + describe('cancelled request', () => { + beforeEach(() => { + listener = (req, res) => { + setTimeout(() => { + app && app(req, res) + res.writeHead(200) + res.end() + }, 500) + } + }) - // Don't use real AbortController because it requires 15.x+ - const ac = new MockAbortController() - request(http2, `http://localhost:${port}/user`, { - signal: ac.signal + beforeEach(() => { + return agent.load('http2') + .then(() => { + http2 = require(pluginToBeLoaded) + }) }) - setTimeout(() => { ac.abort() }, 100) - }) - }) - describe('without configuration', () => { - beforeEach(() => { - return agent.load('http2') - .then(() => { - http2 = require('http2') - }) - }) + beforeEach(done => { + const server = http2.createServer(listener) + appListener = server + .listen(port, 'localhost', () => done()) + }) - beforeEach(done => { - const server = http2.createServer(listener) - appListener = server - .listen(port, 'localhost', () => done()) + it('should send traces to agent', (done) => { + app = sinon.stub() + agent + .use(traces => { + expect(app).not.to.have.been.called // request should be cancelled before call to app + expect(traces[0][0]).to.have.property('name', 'web.request') + expect(traces[0][0]).to.have.property('service', 'test') + expect(traces[0][0]).to.have.property('type', 'web') + 
expect(traces[0][0]).to.have.property('resource', 'GET') + expect(traces[0][0].meta).to.have.property('span.kind', 'server') + expect(traces[0][0].meta).to.have.property('http.url', `http://localhost:${port}/user`) + expect(traces[0][0].meta).to.have.property('http.method', 'GET') + expect(traces[0][0].meta).to.have.property('http.status_code', '200') + expect(traces[0][0].meta).to.have.property('component', 'http2') + }) + .then(done) + .catch(done) + + // Don't use real AbortController because it requires 15.x+ + const ac = new MockAbortController() + request(http2, `http://localhost:${port}/user`, { + signal: ac.signal + }) + setTimeout(() => { ac.abort() }, 100) + }) }) - const spanProducerFn = (done) => { - request(http2, `http://localhost:${port}/user`).catch(done) - } - - withNamingSchema( - spanProducerFn, - rawExpectedSchema.server - ) - - it('should do automatic instrumentation', done => { - agent - .use(traces => { - expect(traces[0][0]).to.have.property('name', 'web.request') - expect(traces[0][0]).to.have.property('service', 'test') - expect(traces[0][0]).to.have.property('type', 'web') - expect(traces[0][0]).to.have.property('resource', 'GET') - expect(traces[0][0].meta).to.have.property('span.kind', 'server') - expect(traces[0][0].meta).to.have.property('http.url', `http://localhost:${port}/user`) - expect(traces[0][0].meta).to.have.property('http.method', 'GET') - expect(traces[0][0].meta).to.have.property('http.status_code', '200') - expect(traces[0][0].meta).to.have.property('component', 'http2') - }) - .then(done) - .catch(done) + describe('without configuration', () => { + beforeEach(() => { + return agent.load('http2') + .then(() => { + http2 = require(pluginToBeLoaded) + }) + }) - request(http2, `http://localhost:${port}/user`).catch(done) - }) + beforeEach(done => { + const server = http2.createServer(listener) + appListener = server + .listen(port, 'localhost', () => done()) + }) - it(`should run the request's close event in the correct context`, done => { - app = (req, res) => { - req.on('close', () => { - expect(tracer.scope().active()).to.equal(null) - done() - }) + const spanProducerFn = (done) => { + request(http2, `http://localhost:${port}/user`).catch(done) } - request(http2, `http://localhost:${port}/user`).catch(done) - }) + withNamingSchema( + spanProducerFn, + rawExpectedSchema.server + ) + + it('should do automatic instrumentation', done => { + agent + .use(traces => { + expect(traces[0][0]).to.have.property('name', 'web.request') + expect(traces[0][0]).to.have.property('service', 'test') + expect(traces[0][0]).to.have.property('type', 'web') + expect(traces[0][0]).to.have.property('resource', 'GET') + expect(traces[0][0].meta).to.have.property('span.kind', 'server') + expect(traces[0][0].meta).to.have.property('http.url', `http://localhost:${port}/user`) + expect(traces[0][0].meta).to.have.property('http.method', 'GET') + expect(traces[0][0].meta).to.have.property('http.status_code', '200') + expect(traces[0][0].meta).to.have.property('component', 'http2') + }) + .then(done) + .catch(done) + + request(http2, `http://localhost:${port}/user`).catch(done) + }) - it(`should run the response's close event in the correct context`, done => { - app = (req, res) => { - const span = tracer.scope().active() + it(`should run the request's close event in the correct context`, done => { + app = (req, res) => { + req.on('close', () => { + expect(tracer.scope().active()).to.equal(null) + done() + }) + } - res.on('close', () => { - 
expect(tracer.scope().active()).to.equal(span) - done() - }) - } + request(http2, `http://localhost:${port}/user`).catch(done) + }) - request(http2, `http://localhost:${port}/user`).catch(done) - }) + it(`should run the response's close event in the correct context`, done => { + app = (req, res) => { + const span = tracer.scope().active() - it(`should run the finish event in the correct context`, done => { - app = (req, res) => { - const span = tracer.scope().active() + res.on('close', () => { + expect(tracer.scope().active()).to.equal(span) + done() + }) + } - res.on('finish', () => { - expect(tracer.scope().active()).to.equal(span) - done() - }) - } + request(http2, `http://localhost:${port}/user`).catch(done) + }) - request(http2, `http://localhost:${port}/user`).catch(done) - }) + it(`should run the finish event in the correct context`, done => { + app = (req, res) => { + const span = tracer.scope().active() - it('should not cause `end` to be called multiple times', done => { - app = (req, res) => { - res.end = sinon.spy(res.end) + res.on('finish', () => { + expect(tracer.scope().active()).to.equal(span) + done() + }) + } - res.on('finish', () => { - expect(res.end).to.have.been.calledOnce - done() - }) - } + request(http2, `http://localhost:${port}/user`).catch(done) + }) - request(http2, `http://localhost:${port}/user`).catch(done) - }) - }) + it('should not cause `end` to be called multiple times', done => { + app = (req, res) => { + res.end = sinon.spy(res.end) - describe('with a blocklist configuration', () => { - beforeEach(() => { - return agent.load('http2', { client: false, blocklist: '/health' }) - .then(() => { - http2 = require('http2') - }) - }) + res.on('finish', () => { + expect(res.end).to.have.been.calledOnce + done() + }) + } - beforeEach(done => { - const server = http2.createServer(listener) - appListener = server - .listen(port, 'localhost', () => done()) + request(http2, `http://localhost:${port}/user`).catch(done) + }) }) - it('should drop traces for blocklist route', done => { - const spy = sinon.spy(() => {}) + describe('with a blocklist configuration', () => { + beforeEach(() => { + return agent.load('http2', { client: false, blocklist: '/health' }) + .then(() => { + http2 = require(pluginToBeLoaded) + }) + }) + + beforeEach(done => { + const server = http2.createServer(listener) + appListener = server + .listen(port, 'localhost', () => done()) + }) - agent - .use((traces) => { - spy() - }) - .catch(done) + it('should drop traces for blocklist route', done => { + const spy = sinon.spy(() => {}) - setTimeout(() => { - expect(spy).to.not.have.been.called - done() - }, 100) + agent + .use((traces) => { + spy() + }) + .catch(done) - request(http2, `http://localhost:${port}/health`).catch(done) + setTimeout(() => { + expect(spy).to.not.have.been.called + done() + }, 100) + + request(http2, `http://localhost:${port}/health`).catch(done) + }) }) }) }) diff --git a/packages/datadog-plugin-net/test/index.spec.js b/packages/datadog-plugin-net/test/index.spec.js index 7f1cf5d3e33..09532ad0502 100644 --- a/packages/datadog-plugin-net/test/index.spec.js +++ b/packages/datadog-plugin-net/test/index.spec.js @@ -15,274 +15,290 @@ describe('Plugin', () => { let tracer let parent - describe('net', () => { - afterEach(() => { - return agent.close() - }) - - afterEach(() => { - tcp.close() - }) + ['net', 'node:net'].forEach(pluginToBeLoaded => { + describe(pluginToBeLoaded, () => { + afterEach(() => { + return agent.close() + }) - afterEach(() => { - ipc.close() - }) + 
afterEach(() => { + tcp.close() + }) - beforeEach(() => { - return agent.load('net') - .then(() => { - net = require(`net`) - tracer = require('../../dd-trace') - parent = tracer.startSpan('parent') - parent.finish() + afterEach(() => { + ipc.close() + }) - return getPort() - }).then(_port => { - port = _port + beforeEach(() => { + return agent.load(['net', 'dns']) + .then(() => { + net = require(pluginToBeLoaded) + tracer = require('../../dd-trace') + parent = tracer.startSpan('parent') + parent.finish() - return new Promise(resolve => setImmediate(resolve)) - }) - }) + return getPort() + }).then(_port => { + port = _port - beforeEach(done => { - tcp = new net.Server(socket => { - socket.write('') + return new Promise(resolve => setImmediate(resolve)) + }) }) - tcp.listen(port, () => done()) - }) - beforeEach(done => { - ipc = new net.Server(socket => { - socket.write('') + beforeEach(done => { + tcp = new net.Server(socket => { + socket.write('') + }) + tcp.listen(port, () => done()) }) - ipc.listen('/tmp/dd-trace.sock', () => done()) - }) - it('should instrument connect with a path', done => { - expectSomeSpan(agent, { - name: 'ipc.connect', - service: 'test', - resource: '/tmp/dd-trace.sock', - meta: { - 'span.kind': 'client', - 'ipc.path': '/tmp/dd-trace.sock' - }, - parent_id: new Int64BE(parent.context()._spanId._buffer) - }).then(done).catch(done) - - tracer.scope().activate(parent, () => { - net.connect('/tmp/dd-trace.sock') + beforeEach(done => { + ipc = new net.Server(socket => { + socket.write('') + }) + ipc.listen('/tmp/dd-trace.sock', () => done()) }) - }) - withPeerService( - () => tracer, - 'net', - () => { - const socket = new net.Socket() - socket.connect(port, 'localhost') - }, - 'localhost', - 'out.host' - ) - - it('should instrument connect with a port', done => { - const socket = new net.Socket() - tracer.scope().activate(parent, () => { - socket.connect(port, 'localhost') - socket.on('connect', () => { - expectSomeSpan(agent, { - name: 'tcp.connect', - service: 'test', - resource: `localhost:${port}`, - meta: { - 'component': 'net', - 'span.kind': 'client', - 'tcp.family': 'IPv4', - 'tcp.remote.host': 'localhost', - 'tcp.local.address': socket.localAddress, - 'out.host': 'localhost' - }, - metrics: { - 'network.destination.port': port, - 'tcp.remote.port': port, - 'tcp.local.port': socket.localPort - }, - parent_id: new Int64BE(parent.context()._spanId._buffer) - }, 2000).then(done).catch(done) + it('should instrument connect with a path', done => { + expectSomeSpan(agent, { + name: 'ipc.connect', + service: 'test', + resource: '/tmp/dd-trace.sock', + meta: { + 'span.kind': 'client', + 'ipc.path': '/tmp/dd-trace.sock' + }, + parent_id: new Int64BE(parent.context()._spanId._buffer) + }).then(done).catch(done) + + tracer.scope().activate(parent, () => { + net.connect('/tmp/dd-trace.sock') }) }) - }) - it('should instrument connect with TCP options', done => { - const socket = new net.Socket() - tracer.scope().activate(parent, () => { - socket.connect({ - port, - host: 'localhost' - }) - socket.on('connect', () => { - expectSomeSpan(agent, { - name: 'tcp.connect', - service: 'test', - resource: `localhost:${port}`, - meta: { - 'component': 'net', - 'span.kind': 'client', - 'tcp.family': 'IPv4', - 'tcp.remote.host': 'localhost', - 'tcp.local.address': socket.localAddress, - 'out.host': 'localhost' - }, - metrics: { - 'network.destination.port': port, - 'tcp.remote.port': port, - 'tcp.local.port': socket.localPort - }, - parent_id: new 
Int64BE(parent.context()._spanId._buffer) - }).then(done).catch(done) + it('should instrument dns', done => { + const socket = new net.Socket() + tracer.scope().activate(parent, () => { + socket.connect(port, 'localhost') + socket.on('connect', () => { + expectSomeSpan(agent, { + name: 'dns.lookup', + service: 'test', + resource: 'localhost' + }, 2000).then(done).catch(done) + }) }) }) - }) - it('should instrument connect with IPC options', done => { - expectSomeSpan(agent, { - name: 'ipc.connect', - service: 'test', - resource: '/tmp/dd-trace.sock', - meta: { - 'component': 'net', - 'span.kind': 'client', - 'ipc.path': '/tmp/dd-trace.sock' + withPeerService( + () => tracer, + 'net', + () => { + const socket = new net.Socket() + socket.connect(port, 'localhost') }, - parent_id: new Int64BE(parent.context()._spanId._buffer) - }).then(done).catch(done) + 'localhost', + 'out.host' + ) - tracer.scope().activate(parent, () => { - net.connect({ - path: '/tmp/dd-trace.sock' + it('should instrument connect with a port', done => { + const socket = new net.Socket() + tracer.scope().activate(parent, () => { + socket.connect(port, 'localhost') + socket.on('connect', () => { + expectSomeSpan(agent, { + name: 'tcp.connect', + service: 'test', + resource: `localhost:${port}`, + meta: { + 'component': 'net', + 'span.kind': 'client', + 'tcp.family': 'IPv4', + 'tcp.remote.host': 'localhost', + 'tcp.local.address': socket.localAddress, + 'out.host': 'localhost' + }, + metrics: { + 'network.destination.port': port, + 'tcp.remote.port': port, + 'tcp.local.port': socket.localPort + }, + parent_id: new Int64BE(parent.context()._spanId._buffer) + }, 2000).then(done).catch(done) + }) }) }) - }) - - it('should instrument error', done => { - const socket = new net.Socket() - - let error = null - agent - .use(traces => { - expect(traces[0][0]).to.deep.include({ - name: 'tcp.connect', - service: 'test', - resource: `localhost:${port}` - }) - expect(traces[0][0].meta).to.deep.include({ - 'component': 'net', - 'span.kind': 'client', - 'tcp.family': 'IPv4', - 'tcp.remote.host': 'localhost', - 'out.host': 'localhost', - [ERROR_TYPE]: error.name, - [ERROR_MESSAGE]: error.message || error.code, - [ERROR_STACK]: error.stack + it('should instrument connect with TCP options', done => { + const socket = new net.Socket() + tracer.scope().activate(parent, () => { + socket.connect({ + port, + host: 'localhost' }) - expect(traces[0][0].metrics).to.deep.include({ - 'network.destination.port': port, - 'tcp.remote.port': port + socket.on('connect', () => { + expectSomeSpan(agent, { + name: 'tcp.connect', + service: 'test', + resource: `localhost:${port}`, + meta: { + 'component': 'net', + 'span.kind': 'client', + 'tcp.family': 'IPv4', + 'tcp.remote.host': 'localhost', + 'tcp.local.address': socket.localAddress, + 'out.host': 'localhost' + }, + metrics: { + 'network.destination.port': port, + 'tcp.remote.port': port, + 'tcp.local.port': socket.localPort + }, + parent_id: new Int64BE(parent.context()._spanId._buffer) + }).then(done).catch(done) }) - expect(traces[0][0].parent_id.toString()).to.equal(parent.context().toSpanId()) }) - .then(done) - .catch(done) + }) - tracer.scope().activate(parent, () => { - tcp.close() - socket.connect({ port }) - socket.once('error', (err) => { - error = err + it('should instrument connect with IPC options', done => { + expectSomeSpan(agent, { + name: 'ipc.connect', + service: 'test', + resource: '/tmp/dd-trace.sock', + meta: { + 'component': 'net', + 'span.kind': 'client', + 'ipc.path': 
'/tmp/dd-trace.sock' + }, + parent_id: new Int64BE(parent.context()._spanId._buffer) + }).then(done).catch(done) + + tracer.scope().activate(parent, () => { + net.connect({ + path: '/tmp/dd-trace.sock' + }) }) }) - }) - - it('should cleanup event listeners when the socket changes state', done => { - const socket = new net.Socket() - tracer.scope().activate(parent, () => { - const events = ['connect', 'error', 'close', 'timeout'] + it('should instrument error', done => { + const socket = new net.Socket() - socket.connect({ port }) - socket.destroy() + let error = null - socket.once('close', () => { - expect(socket.eventNames()).to.not.include.members(events) - done() + agent + .use(traces => { + expect(traces[0][0]).to.deep.include({ + name: 'tcp.connect', + service: 'test', + resource: `localhost:${port}` + }) + expect(traces[0][0].meta).to.deep.include({ + 'component': 'net', + 'span.kind': 'client', + 'tcp.family': 'IPv4', + 'tcp.remote.host': 'localhost', + 'out.host': 'localhost', + [ERROR_TYPE]: error.name, + [ERROR_MESSAGE]: error.message || error.code, + [ERROR_STACK]: error.stack + }) + expect(traces[0][0].metrics).to.deep.include({ + 'network.destination.port': port, + 'tcp.remote.port': port + }) + expect(traces[0][0].parent_id.toString()).to.equal(parent.context().toSpanId()) + }) + .then(done) + .catch(done) + + tracer.scope().activate(parent, () => { + tcp.close() + socket.connect({ port }) + socket.once('error', (err) => { + error = err + }) }) }) - }) - it('should run event listeners in the correct scope', () => { - return tracer.scope().activate(parent, () => { + it('should cleanup event listeners when the socket changes state', done => { const socket = new net.Socket() - const promises = Array(5).fill(0).map(() => { - let res - let rej - const p = new Promise((resolve, reject) => { - res = resolve - rej = reject + tracer.scope().activate(parent, () => { + const events = ['connect', 'error', 'close', 'timeout'] + + socket.connect({ port }) + socket.destroy() + + socket.once('close', () => { + expect(socket.eventNames()).to.not.include.members(events) + done() }) - p.resolve = res - p.reject = rej - return p }) + }) - socket.on('connect', () => { - expect(tracer.scope().active()).to.equal(parent) - promises[0].resolve() - }) + it('should run event listeners in the correct scope', () => { + return tracer.scope().activate(parent, () => { + const socket = new net.Socket() + + const promises = Array(5).fill(0).map(() => { + let res + let rej + const p = new Promise((resolve, reject) => { + res = resolve + rej = reject + }) + p.resolve = res + p.reject = rej + return p + }) - socket.on('ready', () => { - expect(tracer.scope().active()).to.equal(parent) - socket.destroy() - promises[1].resolve() - }) + socket.on('connect', () => { + expect(tracer.scope().active()).to.equal(parent) + promises[0].resolve() + }) - socket.on('close', () => { - expect(tracer.scope().active()).to.not.be.null - expect(tracer.scope().active().context()._name).to.equal('tcp.connect') - promises[2].resolve() - }) + socket.on('ready', () => { + expect(tracer.scope().active()).to.equal(parent) + socket.destroy() + promises[1].resolve() + }) - socket.on('lookup', () => { - expect(tracer.scope().active()).to.not.be.null - expect(tracer.scope().active().context()._name).to.equal('tcp.connect') - promises[3].resolve() - }) + socket.on('close', () => { + expect(tracer.scope().active()).to.not.be.null + expect(tracer.scope().active().context()._name).to.equal('tcp.connect') + promises[2].resolve() + }) - 
socket.connect({ - port, - lookup: (...args) => { + socket.on('lookup', () => { expect(tracer.scope().active()).to.not.be.null expect(tracer.scope().active().context()._name).to.equal('tcp.connect') - promises[4].resolve() - dns.lookup(...args) - } - }) + promises[3].resolve() + }) + + socket.connect({ + port, + lookup: (...args) => { + expect(tracer.scope().active()).to.not.be.null + expect(tracer.scope().active().context()._name).to.equal('tcp.connect') + promises[4].resolve() + dns.lookup(...args) + } + }) - return Promise.all(promises) + return Promise.all(promises) + }) }) - }) - it('should run the connection callback in the correct scope', done => { - const socket = new net.Socket() + it('should run the connection callback in the correct scope', done => { + const socket = new net.Socket() - tracer.scope().activate(parent, () => { - socket.connect({ port }, function () { - expect(this).to.equal(socket) - expect(tracer.scope().active()).to.equal(parent) - socket.destroy() - done() + tracer.scope().activate(parent, () => { + socket.connect({ port }, function () { + expect(this).to.equal(socket) + expect(tracer.scope().active()).to.equal(parent) + socket.destroy() + done() + }) }) }) }) diff --git a/packages/dd-trace/src/plugins/index.js b/packages/dd-trace/src/plugins/index.js index d2a22cd8b15..c7c96df0f50 100644 --- a/packages/dd-trace/src/plugins/index.js +++ b/packages/dd-trace/src/plugins/index.js @@ -59,6 +59,11 @@ module.exports = { get 'mysql2' () { return require('../../../datadog-plugin-mysql2/src') }, get 'net' () { return require('../../../datadog-plugin-net/src') }, get 'next' () { return require('../../../datadog-plugin-next/src') }, + get 'node:dns' () { return require('../../../datadog-plugin-dns/src') }, + get 'node:http' () { return require('../../../datadog-plugin-http/src') }, + get 'node:http2' () { return require('../../../datadog-plugin-http2/src') }, + get 'node:https' () { return require('../../../datadog-plugin-http/src') }, + get 'node:net' () { return require('../../../datadog-plugin-net/src') }, get 'oracledb' () { return require('../../../datadog-plugin-oracledb/src') }, get 'openai' () { return require('../../../datadog-plugin-openai/src') }, get 'paperplane' () { return require('../../../datadog-plugin-paperplane/src') }, From 26ee65099a9d92ce6375bef0fb1074ac3733974f Mon Sep 17 00:00:00 2001 From: Ayan Khan Date: Wed, 27 Dec 2023 15:22:47 -0500 Subject: [PATCH 138/147] simplify ci build for aerospike (#3886) * simplify ci build for aerospike --- .github/workflows/plugins.yml | 55 +++++++++++++++-------------------- 1 file changed, 24 insertions(+), 31 deletions(-) diff --git a/.github/workflows/plugins.yml b/.github/workflows/plugins.yml index 59f2dcd3934..7c6f1f10280 100644 --- a/.github/workflows/plugins.yml +++ b/.github/workflows/plugins.yml @@ -59,11 +59,7 @@ jobs: majorVersion=$(echo "$version" | cut -d '.' -f 1) echo "Major Version: $majorVersion" echo "MAJOR_VERSION=$majorVersion" >> $GITHUB_ENV - - name: Check package version - if: env.MAJOR_VERSION == '3' - run: | - echo "Package version is 3. Proceeding with the next steps." 
- - name: Install dependencies + - name: Install dependencies and run tests if: env.MAJOR_VERSION == '3' run: | apt-get update && \ @@ -72,14 +68,11 @@ jobs: wget \ g++ libssl1.0.0 libssl-dev zlib1g-dev && \ npm install -g yarn - - if: env.MAJOR_VERSION == '3' - run: yarn install --ignore-engines - - if: env.MAJOR_VERSION == '3' - uses: ./.github/actions/node/14 - - if: env.MAJOR_VERSION == '3' - run: yarn test:plugins:ci - - if: env.MAJOR_VERSION == '3' + yarn install --ignore-engines + yarn test:plugins:ci + - if: always() uses: codecov/codecov-action@v2 + aerospike-4: runs-on: ubuntu-latest services: @@ -101,7 +94,15 @@ jobs: - if: always() uses: ./.github/actions/testagent/logs - uses: codecov/codecov-action@v2 + aerospike-5: + strategy: + matrix: + node-version: [16] + range: ['5.5.0 - 5.7.0'] + include: + - node-version: 20 + range: '>=5.8.0' runs-on: ubuntu-latest services: aerospike: @@ -111,7 +112,7 @@ jobs: env: PLUGINS: aerospike SERVICES: aerospike - PACKAGE_VERSION_RANGE: '5.5.0 - 5.7.0' + PACKAGE_VERSION_RANGE: ${{ matrix.range }} steps: - uses: actions/checkout@v2 - uses: ./.github/actions/testagent/start @@ -126,26 +127,18 @@ jobs: majorVersion=$(echo "$version" | cut -d '.' -f 1) echo "Major Version: $majorVersion" echo "MAJOR_VERSION=$majorVersion" >> $GITHUB_ENV - - name: Check package version + - uses: actions/setup-node@v3 + with: + node-version: ${{ matrix.node-version }} + - name: Install dependencies and run tests if: env.MAJOR_VERSION != '3' run: | - echo "Package version is not 3. Proceeding with the next steps." - - if: env.MAJOR_VERSION != '3' - run: yarn install --ignore-engines - - if: env.MAJOR_VERSION != '3' - uses: ./.github/actions/node/oldest - - if: env.MAJOR_VERSION != '3' - run: yarn test:plugins:ci - - if: env.MAJOR_VERSION != '3' - run: echo "PACKAGE_VERSION_RANGE=>=5.8.0" >> "$GITHUB_ENV" - - if: env.MAJOR_VERSION != '3' - uses: ./.github/actions/node/20 # currently the latest version of aerospike only supports node 20 - - if: env.MAJOR_VERSION != '3' - run: yarn test:plugins:ci - - if: env.MAJOR_VERSION != '3' + yarn install --ignore-engines + yarn test:plugins:ci + - if: always() uses: ./.github/actions/testagent/logs - - if: env.MAJOR_VERSION != '3' - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v2 + amqp10: # TODO: move rhea to its own job runs-on: ubuntu-latest services: @@ -1247,4 +1240,4 @@ jobs: - uses: ./.github/actions/node/latest - run: yarn test:plugins:ci - if: always() - uses: ./.github/actions/testagent/logs + uses: ./.github/actions/testagent/logs \ No newline at end of file From 825327b606393ccec3765d4f5393dd54e8c52bd0 Mon Sep 17 00:00:00 2001 From: Stephen Belanger Date: Thu, 28 Dec 2023 22:35:06 +0800 Subject: [PATCH 139/147] Fix net plugin tests (#3906) --- packages/datadog-plugin-net/test/index.spec.js | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/packages/datadog-plugin-net/test/index.spec.js b/packages/datadog-plugin-net/test/index.spec.js index 09532ad0502..0f47b0c9034 100644 --- a/packages/datadog-plugin-net/test/index.spec.js +++ b/packages/datadog-plugin-net/test/index.spec.js @@ -230,8 +230,14 @@ describe('Plugin', () => { socket.destroy() socket.once('close', () => { - expect(socket.eventNames()).to.not.include.members(events) - done() + setImmediate(() => { + // Node.js 21.2 broke this function. We'll have to do the more manual way for now. 
+            // expect(socket.eventNames()).to.not.include.members(events)
+            for (const event of events) {
+              expect(socket.listeners(event)).to.have.lengthOf(0)
+            }
+            done()
+          })
         })
       })
     })

From 5a373f3a24b4ffe08088492626d22cbdd030a165 Mon Sep 17 00:00:00 2001
From: Stephen Belanger
Date: Thu, 28 Dec 2023 22:47:28 +0800
Subject: [PATCH 140/147] Fix integration tests by pinning chai to v4 as v5 went ESM-only (#3909)

---
 integration-tests/ci-visibility.spec.js | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/integration-tests/ci-visibility.spec.js b/integration-tests/ci-visibility.spec.js
index b9cf69c4c41..fa07ff4a29a 100644
--- a/integration-tests/ci-visibility.spec.js
+++ b/integration-tests/ci-visibility.spec.js
@@ -38,7 +38,7 @@ const mochaCommonOptions = {
 
 const jestCommonOptions = {
   name: 'jest',
-  dependencies: ['jest', 'chai', 'jest-jasmine2'],
+  dependencies: ['jest', 'chai@v4', 'jest-jasmine2'],
   expectedStdout: 'Test Suites: 2 passed',
   expectedCoverageFiles: [
     'ci-visibility/test/sum.js',
@@ -51,7 +51,7 @@ const testFrameworks = [
   {
     ...mochaCommonOptions,
     testFile: 'ci-visibility/run-mocha.js',
-    dependencies: ['mocha', 'chai', 'nyc'],
+    dependencies: ['mocha', 'chai@v4', 'nyc'],
     expectedCoverageFiles: [
       'ci-visibility/run-mocha.js',
       'ci-visibility/test/sum.js',
@@ -64,7 +64,7 @@ const testFrameworks = [
   {
     ...mochaCommonOptions,
     testFile: 'ci-visibility/run-mocha.mjs',
-    dependencies: ['mocha', 'chai', 'nyc', '@istanbuljs/esm-loader-hook'],
+    dependencies: ['mocha', 'chai@v4', 'nyc', '@istanbuljs/esm-loader-hook'],
     expectedCoverageFiles: [
       'ci-visibility/run-mocha.mjs',
       'ci-visibility/test/sum.js',

From 18f378bcb69f21d7a912eaba6a79a65bcee8df2b Mon Sep 17 00:00:00 2001
From: Stephen Belanger
Date: Thu, 28 Dec 2023 23:23:01 +0800
Subject: [PATCH 141/147] Add install_signature to app-started telemetry event (#3903)

---
 packages/dd-trace/src/config.js | 19 +++++++++++++++++++
 packages/dd-trace/src/telemetry/index.js | 15 +++++++++++++++
 packages/dd-trace/test/config.spec.js | 11 +++++++++++
 .../dd-trace/test/telemetry/index.spec.js | 15 ++++++++++++++-
 4 files changed, 59 insertions(+), 1 deletion(-)

diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js
index 40c9c68d091..1d5d4c17e7c 100644
--- a/packages/dd-trace/src/config.js
+++ b/packages/dd-trace/src/config.js
@@ -522,6 +522,19 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)
       0
     )
 
+    const DD_INSTRUMENTATION_INSTALL_ID = coalesce(
+      process.env.DD_INSTRUMENTATION_INSTALL_ID,
+      null
+    )
+    const DD_INSTRUMENTATION_INSTALL_TIME = coalesce(
+      process.env.DD_INSTRUMENTATION_INSTALL_TIME,
+      null
+    )
+    const DD_INSTRUMENTATION_INSTALL_TYPE = coalesce(
+      process.env.DD_INSTRUMENTATION_INSTALL_TYPE,
+      null
+    )
+
     const ingestion = options.ingestion || {}
     const dogstatsd = coalesce(options.dogstatsd, {})
     const sampler = {
@@ -671,6 +684,12 @@
    this.spanLeakDebug = Number(DD_TRACE_SPAN_LEAK_DEBUG)
 
+    this.installSignature = {
+      id: DD_INSTRUMENTATION_INSTALL_ID,
+      time: DD_INSTRUMENTATION_INSTALL_TIME,
+      type: DD_INSTRUMENTATION_INSTALL_TYPE
+    }
+
     this._applyDefaults()
     this._applyEnvironment()
     this._applyOptions(options)
diff --git a/packages/dd-trace/src/telemetry/index.js b/packages/dd-trace/src/telemetry/index.js
index f7a300309b6..a9a741b8539 100644
--- a/packages/dd-trace/src/telemetry/index.js
+++ b/packages/dd-trace/src/telemetry/index.js
@@ -112,11 +112,26 @@ function flatten (input, result = [], prefix = [], traversedObjects = null) {
   return result
 }
 
+function getInstallSignature (config) {
+  const { installSignature: sig } = config
+  if (sig && (sig.id || sig.time || sig.type)) {
+    return {
+      install_id: sig.id,
+      install_time: sig.time,
+      install_type: sig.type
+    }
+  }
+}
+
 function appStarted (config) {
   const app = {
     products: getProducts(config),
     configuration: flatten(config)
   }
+  const installSignature = getInstallSignature(config)
+  if (installSignature) {
+    app.install_signature = installSignature
+  }
   // TODO: add app.error with correct error codes
   // if (errors.agentError) {
   //   app.error = errors.agentError
diff --git a/packages/dd-trace/test/config.spec.js b/packages/dd-trace/test/config.spec.js
index 21047b5c894..6b14a307039 100644
--- a/packages/dd-trace/test/config.spec.js
+++ b/packages/dd-trace/test/config.spec.js
@@ -121,6 +121,9 @@ describe('Config', () => {
     expect(config).to.have.nested.property('iast.redactionNamePattern', null)
     expect(config).to.have.nested.property('iast.redactionValuePattern', null)
     expect(config).to.have.nested.property('iast.telemetryVerbosity', 'INFORMATION')
+    expect(config).to.have.nested.property('installSignature.id', null)
+    expect(config).to.have.nested.property('installSignature.time', null)
+    expect(config).to.have.nested.property('installSignature.type', null)
   })
 
   it('should support logging', () => {
@@ -229,6 +232,9 @@ describe('Config', () => {
     process.env.DD_EXPERIMENTAL_PROFILING_ENABLED = 'true'
     process.env.DD_EXPERIMENTAL_API_SECURITY_ENABLED = 'true'
     process.env.DD_API_SECURITY_REQUEST_SAMPLE_RATE = 1
+    process.env.DD_INSTRUMENTATION_INSTALL_ID = '68e75c48-57ca-4a12-adfc-575c4b05fcbe'
+    process.env.DD_INSTRUMENTATION_INSTALL_TYPE = 'k8s_single_step'
+    process.env.DD_INSTRUMENTATION_INSTALL_TIME = '1703188212'
 
     const config = new Config()
 
@@ -308,6 +314,11 @@ describe('Config', () => {
     expect(config).to.have.nested.property('iast.redactionNamePattern', 'REDACTION_NAME_PATTERN')
     expect(config).to.have.nested.property('iast.redactionValuePattern', 'REDACTION_VALUE_PATTERN')
     expect(config).to.have.nested.property('iast.telemetryVerbosity', 'DEBUG')
+    expect(config).to.have.deep.property('installSignature', {
+      id: '68e75c48-57ca-4a12-adfc-575c4b05fcbe',
+      type: 'k8s_single_step',
+      time: '1703188212'
+    })
   })
 
   it('should read case-insensitive booleans from environment variables', () => {
diff --git a/packages/dd-trace/test/telemetry/index.spec.js b/packages/dd-trace/test/telemetry/index.spec.js
index ea6f974d6d3..5bb42a8c7c3 100644
--- a/packages/dd-trace/test/telemetry/index.spec.js
+++ b/packages/dd-trace/test/telemetry/index.spec.js
@@ -74,6 +74,11 @@ describe('telemetry', () => {
       peerServiceMapping: {
         'service_1': 'remapped_service_1',
         'service_2': 'remapped_service_2'
+      },
+      installSignature: {
+        id: '68e75c48-57ca-4a12-adfc-575c4b05fcbe',
+        type: 'k8s_single_step',
+        time: '1703188212'
+      }
       }
     }, {
       _pluginsByName: pluginsByName
@@ -105,8 +110,16 @@ describe('telemetry', ()
=> { { name: 'appsec.enabled', value: true, origin: 'unknown' }, { name: 'profiling.enabled', value: true, origin: 'unknown' }, { name: 'peerServiceMapping.service_1', value: 'remapped_service_1', origin: 'unknown' }, - { name: 'peerServiceMapping.service_2', value: 'remapped_service_2', origin: 'unknown' } + { name: 'peerServiceMapping.service_2', value: 'remapped_service_2', origin: 'unknown' }, + { name: 'installSignature.id', value: '68e75c48-57ca-4a12-adfc-575c4b05fcbe', origin: 'unknown' }, + { name: 'installSignature.type', value: 'k8s_single_step', origin: 'unknown' }, + { name: 'installSignature.time', value: '1703188212', origin: 'unknown' } ]) + expect(payload).to.have.property('install_signature').that.deep.equal({ + install_id: '68e75c48-57ca-4a12-adfc-575c4b05fcbe', + install_type: 'k8s_single_step', + install_time: '1703188212' + }) }) }) From d482f2d9aafd0b1448a9844de620714d10be3d43 Mon Sep 17 00:00:00 2001 From: Nicolas Savoire Date: Thu, 28 Dec 2023 16:25:15 +0100 Subject: [PATCH 142/147] Fix compatibility with node < 14.18 (#3908) Node 14 versions prior to 14.18 do not support require statements with `node:` prefix. --- integration-tests/profiler.spec.js | 8 ++++---- integration-tests/profiler/nettest.js | 3 +-- packages/dd-trace/src/profiling/profilers/events.js | 2 +- packages/dd-trace/src/profiling/profilers/shared.js | 2 +- 4 files changed, 7 insertions(+), 8 deletions(-) diff --git a/integration-tests/profiler.spec.js b/integration-tests/profiler.spec.js index 8be691fe592..76685bc30be 100644 --- a/integration-tests/profiler.spec.js +++ b/integration-tests/profiler.spec.js @@ -8,10 +8,10 @@ const childProcess = require('child_process') const { fork } = childProcess const path = require('path') const { assert } = require('chai') -const fs = require('node:fs/promises') -const fsync = require('node:fs') -const net = require('node:net') -const zlib = require('node:zlib') +const fs = require('fs/promises') +const fsync = require('fs') +const net = require('net') +const zlib = require('zlib') const { Profile } = require('pprof-format') const semver = require('semver') diff --git a/integration-tests/profiler/nettest.js b/integration-tests/profiler/nettest.js index b98bc7d55f3..e9f3002d6b0 100644 --- a/integration-tests/profiler/nettest.js +++ b/integration-tests/profiler/nettest.js @@ -1,5 +1,4 @@ -const net = require('node:net') -const process = require('node:process') +const net = require('net') async function streamToString (stream) { const chunks = [] diff --git a/packages/dd-trace/src/profiling/profilers/events.js b/packages/dd-trace/src/profiling/profilers/events.js index 5c743bb96b2..45ee6f94009 100644 --- a/packages/dd-trace/src/profiling/profilers/events.js +++ b/packages/dd-trace/src/profiling/profilers/events.js @@ -1,4 +1,4 @@ -const { performance, constants, PerformanceObserver } = require('node:perf_hooks') +const { performance, constants, PerformanceObserver } = require('perf_hooks') const { END_TIMESTAMP_LABEL } = require('./shared') const semver = require('semver') const { Function, Label, Line, Location, Profile, Sample, StringTable, ValueType } = require('pprof-format') diff --git a/packages/dd-trace/src/profiling/profilers/shared.js b/packages/dd-trace/src/profiling/profilers/shared.js index 4337a80ae29..31dc7b2ce34 100644 --- a/packages/dd-trace/src/profiling/profilers/shared.js +++ b/packages/dd-trace/src/profiling/profilers/shared.js @@ -1,6 +1,6 @@ 'use strict' -const { isMainThread, threadId } = require('node:worker_threads') +const { isMainThread, 
threadId } = require('worker_threads') const END_TIMESTAMP_LABEL = 'end_timestamp_ns' const THREAD_NAME_LABEL = 'thread name' From 5b780fc1ea4283b934d95d712815e4ba75d5e9c1 Mon Sep 17 00:00:00 2001 From: Stephen Belanger Date: Fri, 29 Dec 2023 00:27:15 +0800 Subject: [PATCH 143/147] Fix timeouts from aws-sdk kinesis tests (#3910) --- packages/datadog-plugin-aws-sdk/test/kinesis.spec.js | 2 +- packages/datadog-plugin-aws-sdk/test/kinesis_helpers.js | 8 +++----- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/packages/datadog-plugin-aws-sdk/test/kinesis.spec.js b/packages/datadog-plugin-aws-sdk/test/kinesis.spec.js index db8177370c0..d3b6221d65a 100644 --- a/packages/datadog-plugin-aws-sdk/test/kinesis.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/kinesis.spec.js @@ -41,7 +41,7 @@ describe('Kinesis', () => { }, (err, res) => { if (err) return done(err) - helpers.waitForActiveStream(this, kinesis, done) + helpers.waitForActiveStream(kinesis, done) }) }) diff --git a/packages/datadog-plugin-aws-sdk/test/kinesis_helpers.js b/packages/datadog-plugin-aws-sdk/test/kinesis_helpers.js index f76e6119251..8f91daab67d 100644 --- a/packages/datadog-plugin-aws-sdk/test/kinesis_helpers.js +++ b/packages/datadog-plugin-aws-sdk/test/kinesis_helpers.js @@ -45,17 +45,15 @@ function putTestRecord (kinesis, data, cb) { }, cb) } -function waitForActiveStream (mocha, kinesis, cb) { +function waitForActiveStream (kinesis, cb) { kinesis.describeStream({ StreamName: 'MyStream' }, (err, data) => { if (err) { - mocha.timeout(2000) - return waitForActiveStream(mocha, kinesis, cb) + return waitForActiveStream(kinesis, cb) } if (data.StreamDescription.StreamStatus !== 'ACTIVE') { - mocha.timeout(2000) - return waitForActiveStream(mocha, kinesis, cb) + return waitForActiveStream(kinesis, cb) } cb() From aff6d0223a0842d6f7f6baa2f3b86ae7ba5a7789 Mon Sep 17 00:00:00 2001 From: Stephen Belanger Date: Sat, 30 Dec 2023 00:19:42 +0800 Subject: [PATCH 144/147] Update actions versions (#3907) * Bump github actions versions * Stop using deprecated set-output command * Fix incompatible GLIBC version in GHA --- .github/actions/testagent/start/action.yml | 2 +- .github/workflows/appsec.yml | 48 ++-- .../workflows/ci-visibility-performance.yml | 2 +- .github/workflows/codeql-analysis.yml | 2 +- .github/workflows/core.yml | 4 +- .github/workflows/lambda.yml | 4 +- .github/workflows/package-size.yml | 2 +- .github/workflows/plugins.yml | 230 +++++++++--------- .github/workflows/profiling.yml | 12 +- .github/workflows/project.yml | 10 +- .github/workflows/release-3.yml | 8 +- .github/workflows/release-dev.yml | 6 +- .github/workflows/release-latest.yml | 14 +- .github/workflows/release-proposal.yml | 2 +- .../workflows/serverless-integration-test.yml | 2 +- .github/workflows/serverless-performance.yml | 4 +- .github/workflows/system-tests.yml | 8 +- .github/workflows/test-k8s-lib-injection.yaml | 2 +- .github/workflows/tracing.yml | 12 +- 19 files changed, 188 insertions(+), 186 deletions(-) diff --git a/.github/actions/testagent/start/action.yml b/.github/actions/testagent/start/action.yml index e5865983986..6f59559648e 100644 --- a/.github/actions/testagent/start/action.yml +++ b/.github/actions/testagent/start/action.yml @@ -3,6 +3,6 @@ description: "Starts the APM Test Agent image with environment." 
runs: using: composite steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - run: docker-compose up -d testagent shell: bash diff --git a/.github/workflows/appsec.yml b/.github/workflows/appsec.yml index a0e22b28ff8..2a1e2440b56 100644 --- a/.github/workflows/appsec.yml +++ b/.github/workflows/appsec.yml @@ -15,16 +15,16 @@ jobs: macos: runs-on: macos-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/node/setup - run: yarn install - run: yarn test:appsec:ci - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 ubuntu: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/node/setup - run: yarn install - uses: ./.github/actions/node/16 @@ -33,16 +33,16 @@ jobs: - run: yarn test:appsec:ci - uses: ./.github/actions/node/latest - run: yarn test:appsec:ci - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 windows: runs-on: windows-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/node/setup - run: yarn install - run: yarn test:appsec:ci - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 ldapjs: runs-on: ubuntu-latest @@ -60,14 +60,14 @@ jobs: LDAP_USERS: 'user01,user02' LDAP_PASSWORDS: 'password1,password2' steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/node/setup - run: yarn install - uses: ./.github/actions/node/oldest - run: yarn test:appsec:plugins:ci - uses: ./.github/actions/node/latest - run: yarn test:appsec:plugins:ci - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 postgres: runs-on: ubuntu-latest @@ -83,7 +83,7 @@ jobs: PLUGINS: pg|knex SERVICES: postgres steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/node/setup - run: yarn install - uses: ./.github/actions/node/oldest @@ -94,7 +94,7 @@ jobs: - run: yarn test:appsec:plugins:ci - uses: ./.github/actions/node/20 - run: yarn test:appsec:plugins:ci - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 mysql: runs-on: ubuntu-latest @@ -110,7 +110,7 @@ jobs: PLUGINS: mysql|mysql2|sequelize SERVICES: mysql steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/node/setup - run: yarn install - uses: ./.github/actions/node/16 @@ -119,35 +119,35 @@ jobs: - run: yarn test:appsec:plugins:ci - uses: ./.github/actions/node/20 - run: yarn test:appsec:plugins:ci - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 express: runs-on: ubuntu-latest env: PLUGINS: express|body-parser|cookie-parser steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/node/setup - run: yarn install - uses: ./.github/actions/node/oldest - run: yarn test:appsec:plugins:ci - uses: ./.github/actions/node/latest - run: yarn test:appsec:plugins:ci - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 graphql: runs-on: ubuntu-latest env: PLUGINS: apollo-server|apollo-server-express|apollo-server-fastify|apollo-server-core steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/node/setup - run: yarn install - uses: ./.github/actions/node/oldest - run: yarn test:appsec:plugins:ci - uses: ./.github/actions/node/latest - run: yarn test:appsec:plugins:ci - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 mongodb-core: runs-on: ubuntu-latest @@ -160,14 
+160,14 @@ jobs: PLUGINS: express-mongo-sanitize SERVICES: mongo steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/node/setup - run: yarn install - uses: ./.github/actions/node/oldest - run: yarn test:appsec:plugins:ci - uses: ./.github/actions/node/latest - run: yarn test:appsec:plugins:ci - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 mongoose: runs-on: ubuntu-latest @@ -180,21 +180,21 @@ jobs: PLUGINS: mongoose SERVICES: mongo steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/node/setup - run: yarn install - uses: ./.github/actions/node/oldest - run: yarn test:appsec:plugins:ci - uses: ./.github/actions/node/latest - run: yarn test:appsec:plugins:ci - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 sourcing: runs-on: ubuntu-latest env: PLUGINS: cookie steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/node/setup - run: yarn install - uses: ./.github/actions/node/16 @@ -205,7 +205,7 @@ jobs: - run: yarn test:appsec:plugins:ci - uses: ./.github/actions/node/latest - run: yarn test:appsec:plugins:ci - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 next: strategy: @@ -220,7 +220,7 @@ jobs: PLUGINS: next RANGE: ${{ matrix.range }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - uses: actions/setup-node@v3 @@ -230,4 +230,4 @@ jobs: - run: yarn test:appsec:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 diff --git a/.github/workflows/ci-visibility-performance.yml b/.github/workflows/ci-visibility-performance.yml index c399c9b3096..2a24980b4d5 100644 --- a/.github/workflows/ci-visibility-performance.yml +++ b/.github/workflows/ci-visibility-performance.yml @@ -19,7 +19,7 @@ jobs: env: ROBOT_CI_GITHUB_PERSONAL_ACCESS_TOKEN: ${{ secrets.ROBOT_CI_GITHUB_PERSONAL_ACCESS_TOKEN }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: ./.github/actions/node/18 - name: CI Visibility Performance Overhead Test run: yarn bench:e2e:ci-visibility diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index af37ccf7d90..51af025df84 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -34,7 +34,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 # Initializes the CodeQL tools for scanning. 
- name: Initialize CodeQL diff --git a/.github/workflows/core.yml b/.github/workflows/core.yml index e8661d9652b..322725b0f34 100644 --- a/.github/workflows/core.yml +++ b/.github/workflows/core.yml @@ -15,11 +15,11 @@ jobs: shimmer: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/node/setup - run: yarn install - uses: ./.github/actions/node/oldest - run: yarn test:shimmer:ci - uses: ./.github/actions/node/latest - run: yarn test:shimmer:ci - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 diff --git a/.github/workflows/lambda.yml b/.github/workflows/lambda.yml index 2600cc157f0..f98b74914e5 100644 --- a/.github/workflows/lambda.yml +++ b/.github/workflows/lambda.yml @@ -15,7 +15,7 @@ jobs: ubuntu: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -29,4 +29,4 @@ jobs: - run: yarn test:lambda:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 diff --git a/.github/workflows/package-size.yml b/.github/workflows/package-size.yml index a29c22f29cb..4b2934a20d1 100644 --- a/.github/workflows/package-size.yml +++ b/.github/workflows/package-size.yml @@ -13,7 +13,7 @@ jobs: package-size-report: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Setup Node.js uses: actions/setup-node@v2 with: diff --git a/.github/workflows/plugins.yml b/.github/workflows/plugins.yml index 7c6f1f10280..d76e3f55a3f 100644 --- a/.github/workflows/plugins.yml +++ b/.github/workflows/plugins.yml @@ -45,14 +45,15 @@ jobs: DD_TEST_AGENT_URL: http://testagent:9126 AEROSPIKE_HOST_ADDRESS: aerospike steps: - - uses: actions/checkout@v2 + # Needs to remain on v3 for now due to GLIBC version + - uses: actions/checkout@v3 - uses: actions/setup-node@v3 with: node-version: '14' - id: pkg run: | content=`cat ./package.json | tr '\n' ' '` - echo "::set-output name=json::$content" + echo "json=$content" >> $GITHUB_OUTPUT - id: extract run: | version="${{fromJson(steps.pkg.outputs.json).version}}" @@ -71,7 +72,7 @@ jobs: yarn install --ignore-engines yarn test:plugins:ci - if: always() - uses: codecov/codecov-action@v2 + uses: codecov/codecov-action@v3 aerospike-4: runs-on: ubuntu-latest @@ -85,7 +86,7 @@ jobs: SERVICES: aerospike PACKAGE_VERSION_RANGE: '4.0.0 - 5.4.0' steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install --ignore-engines @@ -93,7 +94,7 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 aerospike-5: strategy: @@ -114,13 +115,13 @@ jobs: SERVICES: aerospike PACKAGE_VERSION_RANGE: ${{ matrix.range }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - id: pkg run: | content=`cat ./package.json | tr '\n' ' '` - echo "::set-output name=json::$content" + echo "json=$content" >> $GITHUB_OUTPUT - id: extract run: | version="${{fromJson(steps.pkg.outputs.json).version}}" @@ -137,7 +138,7 @@ jobs: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 amqp10: # TODO: move 
rhea to its own job runs-on: ubuntu-latest @@ -153,7 +154,7 @@ jobs: PLUGINS: amqp10|rhea SERVICES: qpid steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -165,7 +166,7 @@ jobs: - run: yarn test:plugins:upstream - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 amqplib: runs-on: ubuntu-latest @@ -178,7 +179,7 @@ jobs: PLUGINS: amqplib SERVICES: rabbitmq steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -190,7 +191,7 @@ jobs: - run: yarn test:plugins:upstream - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 aws-sdk: runs-on: ubuntu-latest @@ -228,7 +229,7 @@ jobs: PLUGINS: aws-sdk SERVICES: localstack localstack-legacy steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -238,14 +239,14 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 axios: runs-on: ubuntu-latest env: PLUGINS: axios steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -255,14 +256,14 @@ jobs: - run: yarn test:plugins:upstream - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 bluebird: runs-on: ubuntu-latest env: PLUGINS: bluebird steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -270,7 +271,7 @@ jobs: - run: yarn test:plugins:ci - uses: ./.github/actions/node/latest - run: yarn test:plugins:ci - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 - if: always() uses: ./.github/actions/testagent/logs @@ -279,7 +280,7 @@ jobs: env: PLUGINS: bunyan steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -291,7 +292,7 @@ jobs: - run: yarn test:plugins:upstream - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 cassandra: runs-on: ubuntu-latest @@ -306,7 +307,7 @@ jobs: PLUGINS: cassandra-driver SERVICES: cassandra steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -316,7 +317,7 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 couchbase: runs-on: ubuntu-latest @@ -330,20 +331,20 @@ jobs: PLUGINS: couchbase SERVICES: couchbase steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install - uses: ./.github/actions/node/oldest - run: yarn test:plugins:ci - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 connect: runs-on: ubuntu-latest env: PLUGINS: connect steps: 
- - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -355,14 +356,14 @@ jobs: - run: yarn test:plugins:upstream - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 cucumber: runs-on: ubuntu-latest env: PLUGINS: cucumber steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -372,7 +373,7 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 # TODO: fix performance issues and test more Node versions cypress: @@ -380,21 +381,21 @@ jobs: env: PLUGINS: cypress steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 dns: runs-on: ubuntu-latest env: PLUGINS: dns steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -408,7 +409,7 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 elasticsearch: runs-on: ubuntu-latest @@ -423,7 +424,7 @@ jobs: PLUGINS: elasticsearch SERVICES: elasticsearch steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -431,14 +432,14 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 express: runs-on: ubuntu-latest env: PLUGINS: express|body-parser|cookie-parser steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -448,14 +449,14 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 fastify: runs-on: ubuntu-latest env: PLUGINS: fastify steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -465,14 +466,14 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 fetch: runs-on: ubuntu-latest env: PLUGINS: fetch steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -482,14 +483,14 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 generic-pool: runs-on: ubuntu-latest env: PLUGINS: generic-pool steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -499,7 +500,7 @@ jobs: - run: yarn test:plugins:ci - 
if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 google-cloud-pubsub: runs-on: ubuntu-latest @@ -512,7 +513,7 @@ jobs: PLUGINS: google-cloud-pubsub SERVICES: gpubsub steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -522,14 +523,14 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 graphql: runs-on: ubuntu-latest env: PLUGINS: graphql steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -541,14 +542,14 @@ jobs: - run: yarn test:plugins:upstream - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 grpc: runs-on: ubuntu-latest env: PLUGINS: grpc steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -558,14 +559,14 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 hapi: runs-on: ubuntu-latest env: PLUGINS: hapi steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -575,14 +576,14 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 http: runs-on: ubuntu-latest env: PLUGINS: http steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -596,14 +597,14 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 http2: runs-on: ubuntu-latest env: PLUGINS: http2 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -617,7 +618,7 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 # TODO: fix performance issues and test more Node versions jest: @@ -625,14 +626,14 @@ jobs: env: PLUGINS: jest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 kafkajs: runs-on: ubuntu-latest @@ -653,7 +654,7 @@ jobs: PLUGINS: kafkajs SERVICES: kafka steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -663,14 +664,14 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 knex: runs-on: ubuntu-latest env: PLUGINS: knex steps: - - uses: actions/checkout@v2 + - uses: 
actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -680,14 +681,14 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 koa: runs-on: ubuntu-latest env: PLUGINS: koa steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -699,7 +700,7 @@ jobs: - run: yarn test:plugins:upstream - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 limitd-client: runs-on: ubuntu-latest @@ -716,7 +717,7 @@ jobs: PLUGINS: limitd-client SERVICES: limitd steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -726,7 +727,7 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 memcached: runs-on: ubuntu-latest @@ -739,7 +740,7 @@ jobs: PLUGINS: memcached SERVICES: memcached steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -749,14 +750,14 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 microgateway-core: runs-on: ubuntu-latest env: PLUGINS: microgateway-core steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -766,14 +767,14 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 mocha: runs-on: ubuntu-latest env: PLUGINS: mocha steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -781,7 +782,7 @@ jobs: - run: yarn test:plugins:ci - uses: ./.github/actions/node/latest - run: yarn test:plugins:ci - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 - if: always() uses: ./.github/actions/testagent/logs @@ -790,7 +791,7 @@ jobs: env: PLUGINS: moleculer steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -800,7 +801,7 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 mongodb-core: runs-on: ubuntu-latest @@ -813,7 +814,7 @@ jobs: PLUGINS: mongodb-core|express-mongo-sanitize SERVICES: mongo steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -823,7 +824,7 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 mongoose: runs-on: ubuntu-latest @@ -836,7 +837,7 @@ jobs: PLUGINS: mongoose SERVICES: mongo steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - 
uses: ./.github/actions/node/setup - run: yarn install @@ -846,7 +847,7 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 mysql: runs-on: ubuntu-latest @@ -862,7 +863,7 @@ jobs: PLUGINS: mysql|mysql2|mariadb # TODO: move mysql2 to its own job SERVICES: mysql steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -872,14 +873,14 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 net: runs-on: ubuntu-latest env: PLUGINS: net steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -893,7 +894,7 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 # TODO: fix performance issues and test more Node versions next: @@ -909,7 +910,7 @@ jobs: PLUGINS: next RANGE: ${{ matrix.range }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - uses: actions/setup-node@v3 @@ -919,14 +920,14 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 openai: runs-on: ubuntu-latest env: PLUGINS: openai steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -936,7 +937,7 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 opensearch: runs-on: ubuntu-latest @@ -952,7 +953,7 @@ jobs: PLUGINS: opensearch SERVICES: opensearch steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -962,7 +963,7 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 # TODO: Install the Oracle client on the host and test Node >=16. # TODO: Figure out why nyc stopped working with EACCESS errors. 
@@ -991,19 +992,20 @@ jobs: SERVICES: oracledb DD_TEST_AGENT_URL: http://testagent:9126 steps: - - uses: actions/checkout@v2 + # Needs to remain on v3 for now due to GLIBC version + - uses: actions/checkout@v3 - uses: ./.github/actions/node/setup - run: yarn install --ignore-engines - run: yarn services - run: yarn test:plugins - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 paperplane: runs-on: ubuntu-latest env: PLUGINS: paperplane steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -1011,7 +1013,7 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 # TODO: re-enable upstream tests if it ever stops being flaky pino: @@ -1019,7 +1021,7 @@ jobs: env: PLUGINS: pino steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -1030,7 +1032,7 @@ jobs: # - run: yarn test:plugins:upstream - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 postgres: runs-on: ubuntu-latest @@ -1046,7 +1048,7 @@ jobs: PLUGINS: pg SERVICES: postgres steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -1056,14 +1058,14 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 promise: runs-on: ubuntu-latest env: PLUGINS: promise steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -1075,14 +1077,14 @@ jobs: - run: yarn test:plugins:upstream - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 promise-js: runs-on: ubuntu-latest env: PLUGINS: promise-js steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -1092,14 +1094,14 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 q: runs-on: ubuntu-latest env: PLUGINS: q steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -1109,7 +1111,7 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 redis: runs-on: ubuntu-latest @@ -1122,7 +1124,7 @@ jobs: PLUGINS: redis|ioredis # TODO: move ioredis to its own job SERVICES: redis steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -1132,14 +1134,14 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 restify: runs-on: ubuntu-latest env: PLUGINS: restify steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: 
./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -1149,14 +1151,14 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 router: runs-on: ubuntu-latest env: PLUGINS: router steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -1166,14 +1168,14 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 sharedb: runs-on: ubuntu-latest env: PLUGINS: sharedb steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -1181,7 +1183,7 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 tedious: runs-on: ubuntu-latest @@ -1198,7 +1200,7 @@ jobs: PLUGINS: tedious SERVICES: mssql steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -1207,14 +1209,14 @@ jobs: - run: yarn test:plugins:upstream - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 when: runs-on: ubuntu-latest env: PLUGINS: when steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -1224,14 +1226,14 @@ jobs: - run: yarn test:plugins:ci - if: always() uses: ./.github/actions/testagent/logs - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 winston: runs-on: ubuntu-latest env: PLUGINS: winston steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start - uses: ./.github/actions/node/setup - run: yarn install @@ -1240,4 +1242,4 @@ jobs: - uses: ./.github/actions/node/latest - run: yarn test:plugins:ci - if: always() - uses: ./.github/actions/testagent/logs \ No newline at end of file + uses: ./.github/actions/testagent/logs diff --git a/.github/workflows/profiling.yml b/.github/workflows/profiling.yml index 05e9696cc48..90731d062cf 100644 --- a/.github/workflows/profiling.yml +++ b/.github/workflows/profiling.yml @@ -15,16 +15,16 @@ jobs: macos: runs-on: macos-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/node/setup - run: yarn install - run: yarn test:profiler:ci - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 ubuntu: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/node/setup - run: yarn install - uses: ./.github/actions/node/16 @@ -35,13 +35,13 @@ jobs: - run: yarn test:profiler:ci - uses: ./.github/actions/node/latest - run: yarn test:profiler:ci - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 windows: runs-on: windows-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/node/setup - run: yarn install - run: yarn test:profiler:ci - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml 
index 05079d33112..34793f3e111 100644 --- a/.github/workflows/project.yml +++ b/.github/workflows/project.yml @@ -21,7 +21,7 @@ jobs: version: [16, 18, latest] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-node@v3 with: node-version: ${{ matrix.version }} @@ -41,7 +41,7 @@ jobs: DD_CIVISIBILITY_AGENTLESS_ENABLED: 1 DD_API_KEY: ${{ secrets.DD_API_KEY_CI_APP }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-node@v3 with: node-version: ${{ matrix.version }} @@ -62,7 +62,7 @@ jobs: DD_CIVISIBILITY_AGENTLESS_ENABLED: 1 DD_API_KEY: ${{ secrets.DD_API_KEY_CI_APP }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: ./.github/actions/node/setup - run: yarn install - uses: actions/setup-node@v3 @@ -76,7 +76,7 @@ jobs: lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: ./.github/actions/node/setup - run: yarn install - run: yarn lint @@ -84,7 +84,7 @@ jobs: typescript: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: ./.github/actions/node/setup - run: yarn install - run: yarn type:test diff --git a/.github/workflows/release-3.yml b/.github/workflows/release-3.yml index ec25371051a..e8791ff645a 100644 --- a/.github/workflows/release-3.yml +++ b/.github/workflows/release-3.yml @@ -19,7 +19,7 @@ jobs: env: NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: actions/setup-node@v3 with: registry-url: 'https://registry.npmjs.org' @@ -27,7 +27,7 @@ jobs: - id: pkg run: | content=`cat ./package.json | tr '\n' ' '` - echo "::set-output name=json::$content" + echo "json=$content" >> $GITHUB_OUTPUT - run: | git tag v${{ fromJson(steps.pkg.outputs.json).version }} git push origin v${{ fromJson(steps.pkg.outputs.json).version }} @@ -36,7 +36,7 @@ jobs: runs-on: ubuntu-latest needs: ['publish'] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-node@v3 - name: Log in to the Container registry uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b @@ -47,7 +47,7 @@ jobs: - id: pkg run: | content=`cat ./package.json | tr '\n' ' '` - echo "::set-output name=json::$content" + echo "json=$content" >> $GITHUB_OUTPUT - name: npm pack for injection image run: | npm pack dd-trace@${{ fromJson(steps.pkg.outputs.json).version }} diff --git a/.github/workflows/release-dev.yml b/.github/workflows/release-dev.yml index 936c0ee0737..fc00326a27f 100644 --- a/.github/workflows/release-dev.yml +++ b/.github/workflows/release-dev.yml @@ -15,7 +15,7 @@ jobs: env: NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-node@v3 with: registry-url: 'https://registry.npmjs.org' @@ -23,7 +23,7 @@ jobs: - id: pkg run: | content=`cat ./package.json | tr '\n' ' '` - echo "::set-output name=json::$content" + echo "json=$content" >> $GITHUB_OUTPUT - run: npm version --no-git-tag-version ${{ fromJson(steps.pkg.outputs.json).version }}-$(git rev-parse --short HEAD)+${{ github.run_id }}.${{ github.run_attempt }} - run: npm publish --tag dev --provenance - run: | @@ -35,7 +35,7 @@ jobs: runs-on: ubuntu-latest needs: ['dev_release'] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-node@v3 - name: Log in to the Container registry uses: 
docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b diff --git a/.github/workflows/release-latest.yml b/.github/workflows/release-latest.yml index a45ed3c87a7..b01be5a4f56 100644 --- a/.github/workflows/release-latest.yml +++ b/.github/workflows/release-latest.yml @@ -21,7 +21,7 @@ jobs: outputs: pkgjson: ${{ steps.pkg.outputs.json }} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: actions/setup-node@v3 with: registry-url: 'https://registry.npmjs.org' @@ -29,7 +29,7 @@ jobs: - id: pkg run: | content=`cat ./package.json | tr '\n' ' '` - echo "::set-output name=json::$content" + echo "json=$content" >> $GITHUB_OUTPUT - run: | git tag v${{ fromJson(steps.pkg.outputs.json).version }} git push origin v${{ fromJson(steps.pkg.outputs.json).version }} @@ -38,7 +38,7 @@ jobs: runs-on: ubuntu-latest needs: ['publish'] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-node@v3 - name: Log in to the Container registry uses: docker/login-action@49ed152c8eca782a232dede0303416e8f356c37b @@ -49,7 +49,7 @@ jobs: - id: pkg run: | content=`cat ./package.json | tr '\n' ' '` - echo "::set-output name=json::$content" + echo "json=$content" >> $GITHUB_OUTPUT - name: npm pack for injection image run: | npm pack dd-trace@${{ fromJson(steps.pkg.outputs.json).version }} @@ -61,12 +61,12 @@ jobs: runs-on: ubuntu-latest needs: ['publish'] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: actions/setup-node@v3 - id: pkg run: | content=`cat ./package.json | tr '\n' ' '` - echo "::set-output name=json::$content" + echo "json=$content" >> $GITHUB_OUTPUT - run: yarn - name: Build working-directory: docs @@ -74,7 +74,7 @@ jobs: yarn yarn build mv out /tmp/out - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: gh-pages - name: Deploy diff --git a/.github/workflows/release-proposal.yml b/.github/workflows/release-proposal.yml index 4935f78c232..5faf193d3ef 100644 --- a/.github/workflows/release-proposal.yml +++ b/.github/workflows/release-proposal.yml @@ -8,7 +8,7 @@ jobs: check_labels: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: fetch-depth: 0 - uses: actions/setup-node@v3 diff --git a/.github/workflows/serverless-integration-test.yml b/.github/workflows/serverless-integration-test.yml index 1687b18fc22..be3eeede960 100644 --- a/.github/workflows/serverless-integration-test.yml +++ b/.github/workflows/serverless-integration-test.yml @@ -16,7 +16,7 @@ jobs: version: [16, latest] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: ./.github/actions/node/setup - run: yarn install - uses: actions/setup-node@v3 diff --git a/.github/workflows/serverless-performance.yml b/.github/workflows/serverless-performance.yml index 47c330ddc4f..a23b18a9bf2 100644 --- a/.github/workflows/serverless-performance.yml +++ b/.github/workflows/serverless-performance.yml @@ -16,9 +16,9 @@ jobs: aws-runtime-name: "nodejs18.x" steps: - name: Checkout dd-trace-js - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Checkout datadog-lambda-js - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: DataDog/datadog-lambda-js path: datadog-lambda-js diff --git a/.github/workflows/system-tests.yml b/.github/workflows/system-tests.yml index 30503e8452b..8eca425b086 100644 --- a/.github/workflows/system-tests.yml +++ b/.github/workflows/system-tests.yml @@ -26,12 +26,12 @@ jobs: steps: - name: Checkout system tests - 
uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: 'DataDog/system-tests' - name: Checkout dd-trace-js - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: path: 'binaries/dd-trace-js' @@ -66,14 +66,14 @@ jobs: NODEJS_DDTRACE_MODULE: datadog/dd-trace-js#${{ github.sha }} steps: - name: Checkout system tests - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: 'DataDog/system-tests' - uses: actions/setup-python@v4 with: python-version: '3.9' - name: Checkout dd-trace-js - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: path: 'binaries/dd-trace-js' - name: Build diff --git a/.github/workflows/test-k8s-lib-injection.yaml b/.github/workflows/test-k8s-lib-injection.yaml index d489708c06a..d0e971e420b 100644 --- a/.github/workflows/test-k8s-lib-injection.yaml +++ b/.github/workflows/test-k8s-lib-injection.yaml @@ -26,7 +26,7 @@ jobs: id: set_names run: | echo "Docker image tag: $(echo ${GITHUB_HEAD_REF-${GITHUB_REF#refs/heads/}} | tr / -)" - echo "::set-output name=image_name::$(echo ${GITHUB_HEAD_REF-${GITHUB_REF#refs/heads/}} | tr / -)" + echo "image_name=$(echo ${GITHUB_HEAD_REF-${GITHUB_REF#refs/heads/}} | tr / -)" >> $GITHUB_OUTPUT - name: Npm pack for injection image run: | diff --git a/.github/workflows/tracing.yml b/.github/workflows/tracing.yml index 1b580a24aa3..3062df0166a 100644 --- a/.github/workflows/tracing.yml +++ b/.github/workflows/tracing.yml @@ -15,16 +15,16 @@ jobs: macos: runs-on: macos-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/node/setup - run: yarn install - run: yarn test:trace:core:ci - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 ubuntu: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/node/setup - run: yarn install - uses: ./.github/actions/node/16 @@ -35,13 +35,13 @@ jobs: - run: yarn test:trace:core:ci - uses: ./.github/actions/node/latest - run: yarn test:trace:core:ci - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 windows: runs-on: windows-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: ./.github/actions/node/setup - run: yarn install - run: yarn test:trace:core:ci - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 From 3ce04b0e1c6685d031ea4b8158ef1db22bc80de8 Mon Sep 17 00:00:00 2001 From: Bowen Brooks <39347269+bojbrook@users.noreply.github.com> Date: Tue, 2 Jan 2024 07:06:38 -0500 Subject: [PATCH 145/147] Adding Pino Integration-test (#2002) --- integration-tests/pino.spec.js | 72 +++++++++++++++++++++++++++++++++ integration-tests/pino/index.js | 32 +++++++++++++++ 2 files changed, 104 insertions(+) create mode 100644 integration-tests/pino.spec.js create mode 100644 integration-tests/pino/index.js diff --git a/integration-tests/pino.spec.js b/integration-tests/pino.spec.js new file mode 100644 index 00000000000..a28bf72a08b --- /dev/null +++ b/integration-tests/pino.spec.js @@ -0,0 +1,72 @@ +/* eslint-disable comma-dangle */ +'use strict' + +const { FakeAgent, spawnProc, createSandbox, curl } = require('./helpers') +const path = require('path') +const { assert } = require('chai') +const { once } = require('events') + +describe('pino test', () => { + let agent + let proc + let sandbox + let cwd + let startupTestFile + + before(async () => { + sandbox = await createSandbox(['pino']) + cwd = sandbox.folder + startupTestFile = path.join(cwd, 'pino/index.js') + }) + + 
after(async () => { + await sandbox.remove() + }) + + context('Log injection', () => { + beforeEach(async () => { + agent = await new FakeAgent().start() + }) + + afterEach(async () => { + proc.kill() + await agent.stop() + }) + + it('Log injection enabled', async () => { + proc = await spawnProc(startupTestFile, { + cwd, + env: { + AGENT_PORT: agent.port, + lOG_INJECTION: true, + }, + stdio: 'pipe', + }) + const [data] = await Promise.all([once(proc.stdout, 'data'), curl(proc)]) + const stdoutData = JSON.parse(data.toString()) + assert.containsAllKeys(stdoutData, ['dd']) + assert.containsAllKeys(stdoutData.dd, ['trace_id', 'span_id']) + assert.strictEqual( + stdoutData['dd']['trace_id'], + stdoutData['custom']['trace_id'] + ) + assert.strictEqual( + stdoutData['dd']['span_id'], + stdoutData['custom']['span_id'] + ) + }) + + it('Log injection disabled', async () => { + proc = await spawnProc(startupTestFile, { + cwd, + env: { + AGENT_PORT: agent.port, + }, + stdio: 'pipe', + }) + const [data] = await Promise.all([once(proc.stdout, 'data'), curl(proc)]) + const stdoutData = JSON.parse(data.toString()) + assert.doesNotHaveAnyKeys(stdoutData, ['dd']) + }) + }) +}) diff --git a/integration-tests/pino/index.js b/integration-tests/pino/index.js new file mode 100644 index 00000000000..40e35388fac --- /dev/null +++ b/integration-tests/pino/index.js @@ -0,0 +1,32 @@ +'use strict' + +const options = {} + +if (process.env.AGENT_PORT) { + options.port = process.env.AGENT_PORT +} + +if (process.env.lOG_INJECTION) { + options.logInjection = process.env.lOG_INJECTION +} + +const tracer = require('dd-trace').init(options) + +const http = require('http') +const logger = require('pino')() + +const server = http + .createServer((req, res) => { + const span = tracer.scope().active() + const contextTraceId = span.context().toTraceId() + const contextSpanId = span.context().toSpanId() + logger.info( + { custom: { trace_id: contextTraceId, span_id: contextSpanId } }, + 'Creating server' + ) + res.end('hello, world\n') + }) + .listen(0, () => { + const port = server.address().port + process.send({ port }) + }) From ae72374597f41ba944fa4ea1d6aee4671ddbf86c Mon Sep 17 00:00:00 2001 From: Nicolas Savoire Date: Tue, 2 Jan 2024 13:13:34 +0100 Subject: [PATCH 146/147] Add `process_id` tag to profiles (#3911) Add a `process_id` that contains process pid to profiles. 
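For context before the diff, here is a minimal standalone sketch of the tag layout this patch extends. `buildProfileTags` is an illustrative name, not a dd-trace function; the entries simply mirror the repeated `tags[]` form fields the agent exporter sends, as shown in the diff below.

    'use strict'

    // Illustrative sketch only -- not the exporter's actual internals.
    // Profiles are uploaded with repeated `tags[]` form fields, each a flat
    // `key:value` string; this patch adds a `process_id` entry derived from
    // process.pid next to the existing runtime tags.
    function buildProfileTags (userTags = {}) {
      return [
        'language:javascript',
        'runtime:nodejs',
        `runtime_version:${process.version}`,
        `process_id:${process.pid}`, // the tag introduced by this patch
        'format:pprof',
        ...Object.entries(userTags).map(([key, value]) => `${key}:${value}`)
      ]
    }

    console.log(buildProfileTags({ env: 'test' }))
    // e.g. [ ..., 'process_id:12345', ..., 'env:test' ]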
--- packages/dd-trace/src/profiling/exporters/agent.js | 1 + packages/dd-trace/test/profiling/exporters/agent.spec.js | 2 ++ 2 files changed, 3 insertions(+) diff --git a/packages/dd-trace/src/profiling/exporters/agent.js b/packages/dd-trace/src/profiling/exporters/agent.js index 712d03f1406..517b774890e 100644 --- a/packages/dd-trace/src/profiling/exporters/agent.js +++ b/packages/dd-trace/src/profiling/exporters/agent.js @@ -75,6 +75,7 @@ class AgentExporter { ['tags[]', 'language:javascript'], ['tags[]', 'runtime:nodejs'], ['tags[]', `runtime_version:${process.version}`], + ['tags[]', `process_id:${process.pid}`], ['tags[]', `profiler_version:${version}`], ['tags[]', 'format:pprof'], ...Object.entries(tags).map(([key, value]) => ['tags[]', `${key}:${value}`]) diff --git a/packages/dd-trace/test/profiling/exporters/agent.spec.js b/packages/dd-trace/test/profiling/exporters/agent.spec.js index 92e7f097539..c1a1038d95c 100644 --- a/packages/dd-trace/test/profiling/exporters/agent.spec.js +++ b/packages/dd-trace/test/profiling/exporters/agent.spec.js @@ -75,6 +75,7 @@ describe('exporters/agent', function () { 'language:javascript', 'runtime:nodejs', `runtime_version:${process.version}`, + `process_id:${process.pid}`, `profiler_version:${version}`, 'format:pprof', 'runtime-id:a1b2c3d4-a1b2-a1b2-a1b2-a1b2c3d4e5f6' @@ -359,6 +360,7 @@ describe('exporters/agent', function () { 'language:javascript', 'runtime:nodejs', `runtime_version:${process.version}`, + `process_id:${process.pid}`, `profiler_version:${version}`, 'format:pprof', 'foo:bar' From 777fbea92c50a317bc39680c9b09d024d556141f Mon Sep 17 00:00:00 2001 From: Ida Liu <119438987+ida613@users.noreply.github.com> Date: Fri, 29 Sep 2023 11:04:39 -0400 Subject: [PATCH 147/147] v5.0.0 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index a54eab1239b..fec45bda8c7 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "dd-trace", - "version": "5.0.0-pre", + "version": "5.0.0", "description": "Datadog APM tracing client for JavaScript", "main": "index.js", "typings": "index.d.ts",
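Finally, a usage-level illustration of the log-injection behavior exercised by the pino integration test above (#2002). The `logInjection` init option and `tracer.trace()` are public dd-trace APIs; the span name and log message are made up for the example.

    'use strict'

    // With log injection enabled, dd-trace adds a `dd` object carrying
    // trace_id and span_id to every pino record emitted inside an active
    // span -- the field the integration test above asserts on.
    const tracer = require('dd-trace').init({ logInjection: true })
    const logger = require('pino')()

    tracer.trace('web.request', () => {
      logger.info('handling request')
      // => {"level":30,...,"dd":{"trace_id":"...","span_id":"..."},"msg":"handling request"}
    })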