diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml index e7b8202d559..587df5a9d3f 100644 --- a/.github/workflows/project.yml +++ b/.github/workflows/project.yml @@ -86,8 +86,8 @@ jobs: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 - uses: ./.github/actions/node/latest - uses: ./.github/actions/install - - run: yarn type:test - - run: yarn type:doc + - run: npm run type:test + - run: npm run type:doc # TODO: Remove need for `npm show` before re-enabling to avoid rate limit errors. # verify-yaml: diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv index 0437621c99a..50dcfba8999 100644 --- a/LICENSE-3rdparty.csv +++ b/LICENSE-3rdparty.csv @@ -34,7 +34,11 @@ require,shell-quote,mit,Copyright (c) 2013 James Halliday require,source-map,BSD-3-Clause,Copyright (c) 2009-2011, Mozilla Foundation and contributors require,ttl-set,MIT,Copyright (c) 2024 Thomas Watson dev,@babel/helpers,MIT,Copyright (c) 2014-present Sebastian McKenzie and other contributors -dev,@types/node,MIT,Copyright Authors +dev,@types/chai,MIT,Copyright (c) Microsoft Corp. +dev,@types/mocha,MIT,Copyright (c) Microsoft Corp. +dev,@types/node,MIT,Copyright (c) Microsoft Corp. +dev,@types/sinon,MIT,Copyright (c) Microsoft Corp. +dev,@types/tap,MIT,Copyright (c) Microsoft Corp. dev,@eslint/eslintrc,MIT,Copyright OpenJS Foundation and other contributors, dev,@eslint/js,MIT,Copyright OpenJS Foundation and other contributors, dev,@msgpack/msgpack,ISC,Copyright 2019 The MessagePack Community @@ -67,6 +71,7 @@ dev,sinon,BSD-3-Clause,Copyright 2010-2017 Christian Johansen dev,sinon-chai,WTFPL and BSD-2-Clause,Copyright 2004 Sam Hocevar 2012–2017 Domenic Denicola dev,tap,ISC,Copyright 2011-2022 Isaac Z. Schlueter and Contributors dev,tiktoken,MIT,Copyright (c) 2022 OpenAI, Shantanu Jain +dev,typescript,Apache license 2.0,Copyright Microsoft Corp. 
dev,workerpool,Apache license 2.0,Copyright (C) 2014-2024 Jos de Jong wjosdejong@gmail.com dev,yaml,ISC,Copyright Eemeli Aro dev,yarn-deduplicate,Apache license 2.0,Copyright [yyyy] [name of copyright owner] diff --git a/benchmark/sirun/run-util.js b/benchmark/sirun/run-util.js index 58c4882190b..79bd90ed810 100644 --- a/benchmark/sirun/run-util.js +++ b/benchmark/sirun/run-util.js @@ -4,7 +4,7 @@ const childProcess = require('child_process') const readline = require('readline') function exec (...args) { - return new Promise((resolve, reject) => { + return /** @type {Promise} */ (new Promise((resolve, reject) => { const proc = childProcess.spawn(...args) streamAddVersion(proc.stdout) proc.on('error', reject) @@ -15,7 +15,7 @@ function exec (...args) { reject(new Error('Process exited with non-zero code.')) } }) - }) + })) } function streamAddVersion (input) { diff --git a/docs/package.json b/docs/package.json index c68302e3eca..0f0afa155d2 100644 --- a/docs/package.json +++ b/docs/package.json @@ -10,7 +10,7 @@ "license": "BSD-3-Clause", "private": true, "devDependencies": { - "typedoc": "^0.25.13", - "typescript": "^4.9.4" + "typedoc": "^0.28.12", + "typescript": "^5.9.2" } } diff --git a/docs/yarn.lock b/docs/yarn.lock index 4c517dabb07..727453c6033 100644 --- a/docs/yarn.lock +++ b/docs/yarn.lock @@ -2,10 +2,68 @@ # yarn lockfile v1 -ansi-sequence-parser@^1.1.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/ansi-sequence-parser/-/ansi-sequence-parser-1.1.1.tgz#e0aa1cdcbc8f8bb0b5bca625aac41f5f056973cf" - integrity sha512-vJXt3yiaUL4UU546s3rPXlsry/RnM730G1+HkpKE012AN0sx1eOrxSu95oKDIonskeLTijMgqWZ3uDEe3NFvyg== +"@gerrit0/mini-shiki@^3.12.0": + version "3.12.1" + resolved "https://registry.yarnpkg.com/@gerrit0/mini-shiki/-/mini-shiki-3.12.1.tgz#d6b40216d05f86fee07d27a30957dc7ff994b39a" + integrity sha512-qA9/VGm7No0kxb7k0oKFT0DSJ6BtuMMtC7JQdZn9ElMALE9hjbyoaS13Y8OWr0qHwzh07KHt3Wbw34az/FLsrg== + dependencies: + "@shikijs/engine-oniguruma" "^3.12.1" + 
"@shikijs/langs" "^3.12.1" + "@shikijs/themes" "^3.12.1" + "@shikijs/types" "^3.12.1" + "@shikijs/vscode-textmate" "^10.0.2" + +"@shikijs/engine-oniguruma@^3.12.1": + version "3.12.1" + resolved "https://registry.yarnpkg.com/@shikijs/engine-oniguruma/-/engine-oniguruma-3.12.1.tgz#b93c07abfc83158350888a407fd6044bc48b19b9" + integrity sha512-hbYq+XOc55CU7Irkhsgwh8WgQbx2W5IVzHV4l+wZ874olMLSNg5o3F73vo9m4SAhimFyqq/86xnx9h+T30HhhQ== + dependencies: + "@shikijs/types" "3.12.1" + "@shikijs/vscode-textmate" "^10.0.2" + +"@shikijs/langs@^3.12.1": + version "3.12.1" + resolved "https://registry.yarnpkg.com/@shikijs/langs/-/langs-3.12.1.tgz#8083318b1c7dad41b990f5644d7a27b93514e11f" + integrity sha512-Y1MbMfVO5baRz7Boo7EoD36TmzfUx/I5n8e+wZumx6SlUA81Zj1ZwNJL871iIuSHrdsheV4AxJtHQ9mlooklmg== + dependencies: + "@shikijs/types" "3.12.1" + +"@shikijs/themes@^3.12.1": + version "3.12.1" + resolved "https://registry.yarnpkg.com/@shikijs/themes/-/themes-3.12.1.tgz#df329febeaa80a931d2dfc8528342ad4bd55140f" + integrity sha512-9JrAm9cA5hqM/YXymA3oAAZdnCgQf1zyrNDtsnM105nNEoEpux4dyzdoOjc2KawEKj1iUs/WH2ota6Atp7GYkQ== + dependencies: + "@shikijs/types" "3.12.1" + +"@shikijs/types@3.12.1", "@shikijs/types@^3.12.1": + version "3.12.1" + resolved "https://registry.yarnpkg.com/@shikijs/types/-/types-3.12.1.tgz#d9eef9ed3bbcf78d1a40a0e5527a1022e0067547" + integrity sha512-Is/p+1vTss22LIsGCJTmGrxu7ZC1iBL9doJFYLaZ4aI8d0VDXb7Mn0kBzhkc7pdsRpmUbQLQ5HXwNpa3H6F8og== + dependencies: + "@shikijs/vscode-textmate" "^10.0.2" + "@types/hast" "^3.0.4" + +"@shikijs/vscode-textmate@^10.0.2": + version "10.0.2" + resolved "https://registry.yarnpkg.com/@shikijs/vscode-textmate/-/vscode-textmate-10.0.2.tgz#a90ab31d0cc1dfb54c66a69e515bf624fa7b2224" + integrity sha512-83yeghZ2xxin3Nj8z1NMd/NCuca+gsYXswywDy5bHvwlWL8tpTQmzGeUuHd9FC3E/SBEMvzJRwWEOz5gGes9Qg== + +"@types/hast@^3.0.4": + version "3.0.4" + resolved "https://registry.yarnpkg.com/@types/hast/-/hast-3.0.4.tgz#1d6b39993b82cea6ad783945b0508c25903e15aa" + integrity 
sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ== + dependencies: + "@types/unist" "*" + +"@types/unist@*": + version "3.0.3" + resolved "https://registry.yarnpkg.com/@types/unist/-/unist-3.0.3.tgz#acaab0f919ce69cce629c2d4ed2eb4adc1b6c20c" + integrity sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q== + +argparse@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38" + integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q== balanced-match@^1.0.0: version "1.0.2" @@ -19,59 +77,74 @@ brace-expansion@^2.0.1: dependencies: balanced-match "^1.0.0" -jsonc-parser@^3.2.0: - version "3.3.1" - resolved "https://registry.yarnpkg.com/jsonc-parser/-/jsonc-parser-3.3.1.tgz#f2a524b4f7fd11e3d791e559977ad60b98b798b4" - integrity sha512-HUgH65KyejrUFPvHFPbqOY0rsFip3Bo5wb4ngvdi1EpCYWUQDC5V+Y7mZws+DLkr4M//zQJoanu1SP+87Dv1oQ== +entities@^4.4.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/entities/-/entities-4.5.0.tgz#5d268ea5e7113ec74c4d033b79ea5a35a488fb48" + integrity sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw== + +linkify-it@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/linkify-it/-/linkify-it-5.0.0.tgz#9ef238bfa6dc70bd8e7f9572b52d369af569b421" + integrity sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ== + dependencies: + uc.micro "^2.0.0" lunr@^2.3.9: version "2.3.9" resolved "https://registry.yarnpkg.com/lunr/-/lunr-2.3.9.tgz#18b123142832337dd6e964df1a5a7707b25d35e1" integrity sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow== -marked@^4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/marked/-/marked-4.3.0.tgz#796362821b019f734054582038b116481b456cf3" - integrity 
sha512-PRsaiG84bK+AMvxziE/lCFss8juXjNaWzVbN5tXAm4XjeaS9NAHhop+PjQxz2A9h8Q4M/xGmzP8vqNwy6JeK0A== +markdown-it@^14.1.0: + version "14.1.0" + resolved "https://registry.yarnpkg.com/markdown-it/-/markdown-it-14.1.0.tgz#3c3c5992883c633db4714ccb4d7b5935d98b7d45" + integrity sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg== + dependencies: + argparse "^2.0.1" + entities "^4.4.0" + linkify-it "^5.0.0" + mdurl "^2.0.0" + punycode.js "^2.3.1" + uc.micro "^2.1.0" + +mdurl@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/mdurl/-/mdurl-2.0.0.tgz#80676ec0433025dd3e17ee983d0fe8de5a2237e0" + integrity sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w== -minimatch@^9.0.3: +minimatch@^9.0.5: version "9.0.5" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.5.tgz#d74f9dd6b57d83d8e98cfb82133b03978bc929e5" integrity sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow== dependencies: brace-expansion "^2.0.1" -shiki@^0.14.7: - version "0.14.7" - resolved "https://registry.yarnpkg.com/shiki/-/shiki-0.14.7.tgz#c3c9e1853e9737845f1d2ef81b31bcfb07056d4e" - integrity sha512-dNPAPrxSc87ua2sKJ3H5dQ/6ZaY8RNnaAqK+t0eG7p0Soi2ydiqbGOTaZCqaYvA/uZYfS1LJnemt3Q+mSfcPCg== - dependencies: - ansi-sequence-parser "^1.1.0" - jsonc-parser "^3.2.0" - vscode-oniguruma "^1.7.0" - vscode-textmate "^8.0.0" - -typedoc@^0.25.13: - version "0.25.13" - resolved "https://registry.yarnpkg.com/typedoc/-/typedoc-0.25.13.tgz#9a98819e3b2d155a6d78589b46fa4c03768f0922" - integrity sha512-pQqiwiJ+Z4pigfOnnysObszLiU3mVLWAExSPf+Mu06G/qsc3wzbuM56SZQvONhHLncLUhYzOVkjFFpFfL5AzhQ== +punycode.js@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/punycode.js/-/punycode.js-2.3.1.tgz#6b53e56ad75588234e79f4affa90972c7dd8cdb7" + integrity sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA== + +typedoc@^0.28.12: + 
version "0.28.12" + resolved "https://registry.yarnpkg.com/typedoc/-/typedoc-0.28.12.tgz#1baa55ab242e237fc896bc01b57cf5f8bd995d32" + integrity sha512-H5ODu4f7N+myG4MfuSp2Vh6wV+WLoZaEYxKPt2y8hmmqNEMVrH69DAjjdmYivF4tP/C2jrIZCZhPalZlTU/ipA== dependencies: + "@gerrit0/mini-shiki" "^3.12.0" lunr "^2.3.9" - marked "^4.3.0" - minimatch "^9.0.3" - shiki "^0.14.7" - -typescript@^4.9.4: - version "4.9.5" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.9.5.tgz#095979f9bcc0d09da324d58d03ce8f8374cbe65a" - integrity sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g== - -vscode-oniguruma@^1.7.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/vscode-oniguruma/-/vscode-oniguruma-1.7.0.tgz#439bfad8fe71abd7798338d1cd3dc53a8beea94b" - integrity sha512-L9WMGRfrjOhgHSdOYgCt/yRMsXzLDJSL7BPrOZt73gU0iWO4mpqzqQzOz5srxqTvMBaR0XZTSrVWo4j55Rc6cA== - -vscode-textmate@^8.0.0: - version "8.0.0" - resolved "https://registry.yarnpkg.com/vscode-textmate/-/vscode-textmate-8.0.0.tgz#2c7a3b1163ef0441097e0b5d6389cd5504b59e5d" - integrity sha512-AFbieoL7a5LMqcnOF04ji+rpXadgOXnZsxQr//r83kLPr7biP7am3g9zbaZIaBGwBRWeSvoMD4mgPdX3e4NWBg== + markdown-it "^14.1.0" + minimatch "^9.0.5" + yaml "^2.8.1" + +typescript@^5.9.2: + version "5.9.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.9.2.tgz#d93450cddec5154a2d5cabe3b8102b83316fb2a6" + integrity sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A== + +uc.micro@^2.0.0, uc.micro@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/uc.micro/-/uc.micro-2.1.0.tgz#f8d3f7d0ec4c3dea35a7e3c8efa4cb8b45c9e7ee" + integrity sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A== + +yaml@^2.8.1: + version "2.8.1" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.8.1.tgz#1870aa02b631f7e8328b93f8bc574fac5d6c4d79" + integrity 
sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw== diff --git a/integration-tests/appsec/index.spec.js b/integration-tests/appsec/index.spec.js index 08c483c7ce1..47863e4b6a1 100644 --- a/integration-tests/appsec/index.spec.js +++ b/integration-tests/appsec/index.spec.js @@ -35,10 +35,10 @@ describe('RASP', () => { execArgv, env: { DD_TRACE_AGENT_PORT: agent.port, - DD_APPSEC_ENABLED: true, - DD_APPSEC_RASP_ENABLED: true, + DD_APPSEC_ENABLED: 'true', + DD_APPSEC_RASP_ENABLED: 'true', DD_APPSEC_RULES: path.join(cwd, 'appsec/rasp/rasp_rules.json'), - DD_APPSEC_RASP_COLLECT_REQUEST_BODY: collectRequestBody + DD_APPSEC_RASP_COLLECT_REQUEST_BODY: String(collectRequestBody) } }, stdOutputHandler, stdOutputHandler) axios = Axios.create({ baseURL: proc.url }) @@ -90,7 +90,7 @@ describe('RASP', () => { await assertExploitDetected() } - return new Promise((resolve, reject) => { + return /** @type {Promise} */ (new Promise((resolve, reject) => { setTimeout(() => { if (hasOutput) { reject(new Error('Unexpected output in stdout/stderr after blocking request')) @@ -98,7 +98,7 @@ describe('RASP', () => { resolve() } }, 50) - }) + })) } async function testCustomErrorHandlerIsNotExecuted (path) { @@ -119,7 +119,7 @@ describe('RASP', () => { assert.strictEqual(e.response.status, 403) await assertExploitDetected() - return new Promise((resolve, reject) => { + return /** @type {Promise} */ (new Promise((resolve, reject) => { setTimeout(() => { if (hasOutput) { reject(new Error('uncaughtExceptionCaptureCallback executed')) @@ -127,7 +127,7 @@ describe('RASP', () => { resolve() } }, 10) - }) + })) } } @@ -140,7 +140,7 @@ describe('RASP', () => { try { await axios.get('/crash') } catch (e) { - return new Promise((resolve, reject) => { + return /** @type {Promise} */ (new Promise((resolve, reject) => { setTimeout(() => { if (hasOutput) { resolve() @@ -148,7 +148,7 @@ describe('RASP', () => { reject(new Error('Output expected after crash')) } }, 
50) - }) + })) } assert.fail('Request should have failed') @@ -168,7 +168,7 @@ describe('RASP', () => { try { await axios.get('/crash-and-recovery-A') } catch (e) { - return new Promise((resolve, reject) => { + return /** @type {Promise} */ (new Promise((resolve, reject) => { setTimeout(() => { if (hasOutput) { reject(new Error('Unexpected output in stdout/stderr after blocking request')) @@ -176,7 +176,7 @@ describe('RASP', () => { resolve() } }, 50) - }) + })) } assert.fail('Request should have failed') @@ -191,7 +191,7 @@ describe('RASP', () => { try { await axios.get('/crash-and-recovery-B') } catch (e) { - return new Promise((resolve, reject) => { + return /** @type {Promise} */ (new Promise((resolve, reject) => { setTimeout(() => { if (hasOutput) { reject(new Error('Unexpected output in stdout/stderr after blocking request')) @@ -199,7 +199,7 @@ describe('RASP', () => { resolve() } }, 50) - }) + })) } assert.fail('Request should have failed') diff --git a/integration-tests/ci-visibility/test-api-manual/test.fake.js b/integration-tests/ci-visibility/test-api-manual/test.fake.js index a3256bc6f42..2da7018e071 100644 --- a/integration-tests/ci-visibility/test-api-manual/test.fake.js +++ b/integration-tests/ci-visibility/test-api-manual/test.fake.js @@ -2,6 +2,8 @@ const { channel } = require('dc-polyfill') const tracer = require('dd-trace') +const assert = require('assert/strict') + const testStartCh = channel('dd-trace:ci:manual:test:start') const testFinishCh = channel('dd-trace:ci:manual:test:finish') const testAddTagsCh = channel('dd-trace:ci:manual:test:addTags') @@ -22,12 +24,12 @@ describe('can run tests', () => { assert.equal(1, 2) }) test('async test will pass', () => { - return new Promise((resolve) => { + return /** @type {Promise} */ (new Promise((resolve) => { setTimeout(() => { assert.equal(1, 1) resolve() }, 10) - }) + })) }) test('integration test', () => { // Just for testing purposes, so we don't create a custom span @@ -38,11 +40,11 @@ 
describe('can run tests', () => { const childSpan = tracer.startSpan('custom.span', { childOf: testSpan }) - return new Promise((resolve) => { + return /** @type {Promise} */ (new Promise((resolve) => { setTimeout(() => { childSpan.finish() resolve() }, 10) - }) + })) }) }) diff --git a/integration-tests/cypress/cypress.spec.js b/integration-tests/cypress/cypress.spec.js index 8bb7a08682e..764f00f99c4 100644 --- a/integration-tests/cypress/cypress.spec.js +++ b/integration-tests/cypress/cypress.spec.js @@ -137,10 +137,10 @@ moduleTypes.forEach(({ // Install cypress' browser before running the tests await execPromise('npx cypress install', { cwd, env: restOfEnv, stdio: 'inherit' }) - await new Promise(resolve => webAppServer.listen(0, 'localhost', () => { + await /** @type {Promise} */ (new Promise(resolve => webAppServer.listen(0, 'localhost', () => { webAppPort = webAppServer.address().port resolve() - })) + }))) }) after(async () => { diff --git a/integration-tests/helpers/fake-agent.js b/integration-tests/helpers/fake-agent.js index f85145769a0..2c60e822804 100644 --- a/integration-tests/helpers/fake-agent.js +++ b/integration-tests/helpers/fake-agent.js @@ -135,7 +135,7 @@ module.exports = class FakeAgent extends EventEmitter { resultReject(new Error(`timeout${errorsMsg}`, { cause: { errors } })) }, timeout) - const resultPromise = new Promise((resolve, reject) => { + const resultPromise = /** @type {Promise} */ (new Promise((resolve, reject) => { resultResolve = () => { clearTimeout(timeoutObj) resolve() @@ -144,7 +144,7 @@ module.exports = class FakeAgent extends EventEmitter { clearTimeout(timeoutObj) reject(e) } - }) + })) const messageHandler = msg => { try { @@ -199,7 +199,7 @@ module.exports = class FakeAgent extends EventEmitter { resultReject(new Error(`timeout${errorsMsg}`, { cause: { errors } })) }, timeout) - const resultPromise = new Promise((resolve, reject) => { + const resultPromise = /** @type {Promise} */ (new Promise((resolve, reject) => { 
resultResolve = () => { clearTimeout(timeoutObj) resolve() @@ -208,7 +208,7 @@ module.exports = class FakeAgent extends EventEmitter { clearTimeout(timeoutObj) reject(e) } - }) + })) const messageHandler = msg => { if (msg.payload.request_type !== requestType) return @@ -242,7 +242,7 @@ module.exports = class FakeAgent extends EventEmitter { resultReject(new Error(`timeout${errorsMsg}`, { cause: { errors } })) }, timeout) - const resultPromise = new Promise((resolve, reject) => { + const resultPromise = /** @type {Promise} */ (new Promise((resolve, reject) => { resultResolve = () => { clearTimeout(timeoutObj) resolve() @@ -251,7 +251,7 @@ module.exports = class FakeAgent extends EventEmitter { clearTimeout(timeoutObj) reject(e) } - }) + })) const messageHandler = msg => { try { diff --git a/integration-tests/helpers/index.js b/integration-tests/helpers/index.js index 63676d2e3db..1a8cfe8d629 100644 --- a/integration-tests/helpers/index.js +++ b/integration-tests/helpers/index.js @@ -21,6 +21,12 @@ const hookFile = 'dd-trace/loader-hook.mjs' // This is set by the setShouldKill function let shouldKill +/** + * @param {string} filename + * @param {string} cwd + * @param {string|function} expectedOut + * @param {string} expectedSource + */ async function runAndCheckOutput (filename, cwd, expectedOut, expectedSource) { const proc = spawn(process.execPath, [filename], { cwd, stdio: 'pipe' }) const pid = proc.pid @@ -58,7 +64,14 @@ async function runAndCheckOutput (filename, cwd, expectedOut, expectedSource) { // This is set by the useSandbox function let sandbox -// This _must_ be used with the useSandbox function +/** + * This _must_ be used with the useSandbox function + * + * @param {string} filename + * @param {string|function} expectedOut + * @param {string[]} expectedTelemetryPoints + * @param {string} expectedSource + */ async function runAndCheckWithTelemetry (filename, expectedOut, expectedTelemetryPoints, expectedSource) { const cwd = sandbox.folder const 
cleanup = telemetryForwarder(expectedTelemetryPoints.length > 0) @@ -74,6 +87,11 @@ async function runAndCheckWithTelemetry (filename, expectedOut, expectedTelemetr } } +/** + * @param {number} pid + * @param {[string, { metadata: Record, points: { name: string, tags: string[] }[] }][]} msgs + * @param {string[]} expectedTelemetryPoints + */ function assertTelemetryPoints (pid, msgs, expectedTelemetryPoints) { let points = [] for (const [telemetryType, data] of msgs) { @@ -91,9 +109,13 @@ function assertTelemetryPoints (pid, msgs, expectedTelemetryPoints) { return a === b ? 0 : a < b ? -1 : 1 } + /** + * @param {...string} args + * @returns {{ name: string, tags: string[] }[]} + */ function getPoints (...args) { const expectedPoints = [] - let currentPoint = {} + let currentPoint = /** @type {{ name?: string, tags?: string[] }} */ ({}) for (const arg of args) { if (!currentPoint.name) { currentPoint.name = 'library_entrypoint.' + arg @@ -106,6 +128,10 @@ function assertTelemetryPoints (pid, msgs, expectedTelemetryPoints) { return expectedPoints } + /** + * @param {Record} actualMetadata + * @param {number} pid + */ function assertMetadata (actualMetadata, pid) { const expectedBasicMetadata = { language_name: 'nodejs', @@ -153,7 +179,7 @@ function assertTelemetryPoints (pid, msgs, expectedTelemetryPoints) { function spawnProc (filename, options = {}, stdioHandler, stderrHandler) { const proc = fork(filename, { ...options, stdio: 'pipe' }) - return new Promise((resolve, reject) => { + return /** @type {Promise} */ (new Promise((resolve, reject) => { proc .on('message', ({ port }) => { if (typeof port !== 'number' && typeof port !== 'string') { @@ -185,9 +211,15 @@ function spawnProc (filename, options = {}, stdioHandler, stderrHandler) { // eslint-disable-next-line no-console if (!options.silent) console.error(data.toString()) }) - }) + })) } +/** + * @param {string[]} dependencies + * @param {boolean} isGitRepo + * @param {string[]} integrationTestsPaths + * @param 
{string} [followUpCommand] + */ async function createSandbox (dependencies = [], isGitRepo = false, integrationTestsPaths = ['./integration-tests/*'], followUpCommand) { const cappedDependencies = dependencies.map(dep => { @@ -337,10 +369,13 @@ function varySandbox (sandbox, filename, variants, namedVariant, packageName = v } /** - * @type {string[]} + * @type {['default', 'star', 'destructure']} */ varySandbox.VARIANTS = ['default', 'star', 'destructure'] +/** + * @param {boolean} shouldExpectTelemetryPoints + */ function telemetryForwarder (shouldExpectTelemetryPoints = true) { process.env.DD_TELEMETRY_FORWARDER_PATH = path.join(__dirname, '..', 'telemetry-forwarder.sh') @@ -389,6 +424,9 @@ function telemetryForwarder (shouldExpectTelemetryPoints = true) { return cleanup } +/** + * @param {string|{ then: (callback: () => Promise) => Promise }|URL} url + */ async function curl (url) { if (url !== null && typeof url === 'object') { if (url.then) { @@ -410,12 +448,23 @@ async function curl (url) { }) } +/** + * @param {FakeAgent} agent + * @param {string|{ then: (callback: () => Promise) => Promise }|URL} procOrUrl + * @param {function} fn + * @param {number} [timeout] + * @param {number} [expectedMessageCount] + * @param {boolean} [resolveAtFirstSuccess] + */ async function curlAndAssertMessage (agent, procOrUrl, fn, timeout, expectedMessageCount, resolveAtFirstSuccess) { const resultPromise = agent.assertMessageReceived(fn, timeout, expectedMessageCount, resolveAtFirstSuccess) await curl(procOrUrl) return resultPromise } +/** + * @param {number} port + */ function getCiVisAgentlessConfig (port) { // We remove GITHUB_WORKSPACE so the repository root is not assigned to dd-trace-js // We remove MOCHA_OPTIONS so the test runner doesn't run the tests twice @@ -430,6 +479,9 @@ function getCiVisAgentlessConfig (port) { } } +/** + * @param {number} port + */ function getCiVisEvpProxyConfig (port) { // We remove GITHUB_WORKSPACE so the repository root is not assigned to 
dd-trace-js // We remove MOCHA_OPTIONS so the test runner doesn't run the tests twice @@ -443,15 +495,26 @@ function getCiVisEvpProxyConfig (port) { } } +/** + * @param {object[][]} spans + * @param {string} name + */ function checkSpansForServiceName (spans, name) { return spans.some((span) => span.some((nestedSpan) => nestedSpan.name === name)) } +/** + * @param {string} cwd + * @param {string} serverFile + * @param {string|number} agentPort + * @param {function} [stdioHandler] + * @param {Record} [additionalEnvArgs] + */ async function spawnPluginIntegrationTestProc (cwd, serverFile, agentPort, stdioHandler, additionalEnvArgs = {}) { - let env = { + let env = /** @type {Record} */ ({ NODE_OPTIONS: `--loader=${hookFile}`, - DD_TRACE_AGENT_PORT: agentPort - } + DD_TRACE_AGENT_PORT: String(agentPort) + }) env = { ...process.env, ...env, ...additionalEnvArgs } return spawnProc(path.join(cwd, serverFile), { cwd, @@ -459,6 +522,9 @@ async function spawnPluginIntegrationTestProc (cwd, serverFile, agentPort, stdio }, stdioHandler) } +/** + * @param {Record} env + */ function useEnv (env) { before(() => { Object.assign(process.env, env) @@ -470,6 +536,9 @@ function useEnv (env) { }) } +/** + * @param {unknown[]} args + */ function useSandbox (...args) { before(async () => { sandbox = await createSandbox(...args) @@ -481,10 +550,16 @@ function useSandbox (...args) { }) } +/** + * @returns {string} + */ function sandboxCwd () { return sandbox.folder } +/** + * @param {boolean} value + */ function setShouldKill (value) { before(() => { shouldKill = value @@ -494,6 +569,7 @@ function setShouldKill (value) { }) } +// @ts-expect-error assert.partialDeepStrictEqual does not exist on older Node.js versions // eslint-disable-next-line n/no-unsupported-features/node-builtins const assertObjectContains = assert.partialDeepStrictEqual || function assertObjectContains (actual, expected) { if (Array.isArray(expected)) { @@ -533,6 +609,10 @@ const assertObjectContains = 
assert.partialDeepStrictEqual || function assertObj } } +/** + * @param {string} actual + * @param {string} [msg] + */ function assertUUID (actual, msg = 'not a valid UUID') { assert.match(actual, /^[\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12}$/, msg) } diff --git a/integration-tests/opentelemetry.spec.js b/integration-tests/opentelemetry.spec.js index b5d5a46f4ce..878ebfd557a 100644 --- a/integration-tests/opentelemetry.spec.js +++ b/integration-tests/opentelemetry.spec.js @@ -26,7 +26,7 @@ async function check (agent, proc, timeout, onMessage = () => { }, isMetrics) { if (code !== 0) { reject(new Error(`Process exited with unexpected status code ${code}.`)) } else { - resolve() + resolve(undefined) } }) }) diff --git a/integration-tests/pino.spec.js b/integration-tests/pino.spec.js index 436ac6e497b..bb3d4c5ed42 100644 --- a/integration-tests/pino.spec.js +++ b/integration-tests/pino.spec.js @@ -81,7 +81,7 @@ describe('pino test', () => { cwd, env: { AGENT_PORT: agent.port, - lOG_INJECTION: false + lOG_INJECTION: 'false' }, stdio: 'pipe', }) diff --git a/integration-tests/profiler/index.js b/integration-tests/profiler/index.js index 5a7fba3989c..a8299dbbfb1 100644 --- a/integration-tests/profiler/index.js +++ b/integration-tests/profiler/index.js @@ -3,7 +3,7 @@ require('dd-trace').init() function busyWait (ms) { - return new Promise(resolve => { + return /** @type {Promise} */ (new Promise(resolve => { let done = false function work () { if (done) return @@ -16,9 +16,9 @@ function busyWait (ms) { setImmediate(work) setTimeout(() => { done = true - resolve(undefined) + resolve() }, ms) - }) + })) } const durationMs = Number.parseInt(process.env.TEST_DURATION_MS ?? 
'500') diff --git a/package.json b/package.json index 9db2dee22ba..42304bf7c1f 100644 --- a/package.json +++ b/package.json @@ -10,6 +10,7 @@ "bench": "node benchmark/index.js", "bench:e2e:test-optimization": "node benchmark/e2e-test-optimization/benchmark-run.js", "dependencies:dedupe": "yarn-deduplicate yarn.lock", + "type:check": "tsc --noEmit -p tsconfig.json", "type:doc": "cd docs && yarn && yarn build", "type:test": "cd docs && yarn && yarn test", "lint": "node scripts/check_licenses.js && eslint . --concurrency=auto --max-warnings 0", @@ -154,7 +155,11 @@ "@eslint/js": "^9.29.0", "@msgpack/msgpack": "^3.1.2", "@stylistic/eslint-plugin": "^5.0.0", + "@types/chai": "^4.3.16", + "@types/mocha": "^10.0.10", "@types/node": "^18.19.106", + "@types/sinon": "^17.0.4", + "@types/tap": "^15.0.12", "axios": "^1.12.2", "benchmark": "^2.1.4", "body-parser": "^2.2.0", @@ -183,6 +188,7 @@ "sinon-chai": "^3.7.0", "tap": "^16.3.10", "tiktoken": "^1.0.21", + "typescript": "^5.9.2", "workerpool": "^9.2.0", "yaml": "^2.8.0", "yarn-deduplicate": "^6.0.2" diff --git a/packages/datadog-core/src/storage.js b/packages/datadog-core/src/storage.js index b5c9739f337..d2f43cb9a85 100644 --- a/packages/datadog-core/src/storage.js +++ b/packages/datadog-core/src/storage.js @@ -10,12 +10,13 @@ const { AsyncLocalStorage } = require('async_hooks') * a "handle" object, which is used as a key in a WeakMap, where the values * are the real store objects. * - * @template T + * @typedef {Record} Store */ class DatadogStorage extends AsyncLocalStorage { /** * - * @param store {DatadogStorage} + * @param store {Store} + * @override */ enterWith (store) { const handle = {} @@ -34,7 +35,7 @@ class DatadogStorage extends AsyncLocalStorage { * * TODO: Refactor the Scope class to use a span-only store and remove this. 
* - * @returns {{}} + * @returns {Store} */ getHandle () { return super.getStore() @@ -46,9 +47,9 @@ class DatadogStorage extends AsyncLocalStorage { * retrieved through `getHandle()` can also be passed in to be used as the * key. This is useful if you've stashed a handle somewhere and want to * retrieve the store with it. - * - * @param [handle] {{}} - * @returns {T | undefined} + * @param {{}} [handle] + * @returns {Store | undefined} + * @override */ getStore (handle) { if (!handle) { @@ -66,11 +67,12 @@ class DatadogStorage extends AsyncLocalStorage { * when dealing with the parent store, so that we don't have to access the * WeakMap. * @template R - * @template TArgs extends any[] - * @param store {DatadogStorage} - * @param fn {() => R} - * @param args {TArgs} - * @returns {void} + * @template TArgs = unknown[] + * @param {Store} store + * @param {() => R} fn + * @param {...TArgs} args + * @returns {R} + * @override */ run (store, fn, ...args) { const prior = super.getStore() @@ -85,8 +87,7 @@ class DatadogStorage extends AsyncLocalStorage { /** * This is the map from handles to real stores, used in the class above. 
- * @template T - * @type {WeakMap} + * @type {WeakMap} */ const stores = new WeakMap() diff --git a/packages/datadog-instrumentations/src/helpers/instrument.js b/packages/datadog-instrumentations/src/helpers/instrument.js index 4698fc99269..a00b8944601 100644 --- a/packages/datadog-instrumentations/src/helpers/instrument.js +++ b/packages/datadog-instrumentations/src/helpers/instrument.js @@ -23,10 +23,11 @@ exports.tracingChannel = function (name) { } /** - * @param {string} args.name module name + * @param {object} args + * @param {string|string[]} args.name module name * @param {string[]} args.versions array of semver range strings - * @param {string} args.file path to file within package to instrument - * @param {string} args.filePattern pattern to match files within package to instrument + * @param {string} [args.file='index.js'] path to file within package to instrument + * @param {string} [args.filePattern] pattern to match files within package to instrument * @param Function hook */ exports.addHook = function addHook ({ name, versions, file, filePattern, patchDefault }, hook) { diff --git a/packages/datadog-plugin-amqp10/test/index.spec.js b/packages/datadog-plugin-amqp10/test/index.spec.js index a1eba7e59a8..4686755f323 100644 --- a/packages/datadog-plugin-amqp10/test/index.spec.js +++ b/packages/datadog-plugin-amqp10/test/index.spec.js @@ -97,7 +97,7 @@ describe('Plugin', () => { expect(span.metrics).to.have.property('network.destination.port', 5673) expect(span.metrics).to.have.property('amqp.connection.port', 5673) expect(span.metrics).to.have.property('amqp.link.handle', 1) - }, 2) + }) .then(done) .catch(done) @@ -116,7 +116,7 @@ describe('Plugin', () => { expect(span.meta).to.have.property(ERROR_MESSAGE, error.message) expect(span.meta).to.have.property(ERROR_STACK, error.stack) expect(span.meta).to.have.property('component', 'amqp10') - }, 2) + }) .then(done) .catch(done) @@ -167,7 +167,7 @@ describe('Plugin', () => { 
expect(span.meta).to.have.property('component', 'amqp10') expect(span.metrics).to.have.property('amqp.connection.port', 5673) expect(span.metrics).to.have.property('amqp.link.handle', 0) - }, 2) + }) .then(done) .catch(done) @@ -222,7 +222,7 @@ describe('Plugin', () => { const span = traces[0][0] expect(span).to.have.property('service', 'test-custom-name') - }, 2) + }) .then(done) .catch(done) diff --git a/packages/datadog-plugin-amqplib/test/index.spec.js b/packages/datadog-plugin-amqplib/test/index.spec.js index a94131208d7..3bbd48ac3ce 100644 --- a/packages/datadog-plugin-amqplib/test/index.spec.js +++ b/packages/datadog-plugin-amqplib/test/index.spec.js @@ -83,7 +83,7 @@ describe('Plugin', () => { expect(span.meta).to.have.property('component', 'amqplib') expect(span.meta).to.have.property('_dd.integration', 'amqplib') expect(span.metrics).to.have.property('network.destination.port', 5672) - }, 2) + }) .then(done) .catch(done) @@ -103,7 +103,7 @@ describe('Plugin', () => { expect(span.meta).to.have.property('out.host', 'localhost') expect(span.meta).to.have.property('component', 'amqplib') expect(span.metrics).to.have.property('network.destination.port', 5672) - }, 3) + }) .then(done) .catch(done) @@ -123,7 +123,7 @@ describe('Plugin', () => { expect(span.meta).to.have.property(ERROR_MESSAGE, error.message) expect(span.meta).to.have.property(ERROR_STACK, error.stack) expect(span.meta).to.have.property('component', 'amqplib') - }, 2) + }) .then(done) .catch(done) @@ -163,7 +163,7 @@ describe('Plugin', () => { expect(span.meta).to.have.property('amqp.routingKey', 'routingKey') expect(span.meta).to.have.property('component', 'amqplib') expect(span.metrics).to.have.property('network.destination.port', 5672) - }, 3) + }) .then(done) .catch(done) @@ -183,7 +183,7 @@ describe('Plugin', () => { expect(span.meta).to.have.property(ERROR_MESSAGE, error.message) expect(span.meta).to.have.property(ERROR_STACK, error.stack) expect(span.meta).to.have.property('component', 
'amqplib') - }, 2) + }) .then(done) .catch(done) @@ -218,7 +218,7 @@ describe('Plugin', () => { expect(span.meta).to.have.property('span.kind', 'consumer') expect(span.meta).to.have.property('amqp.consumerTag', consumerTag) expect(span.meta).to.have.property('component', 'amqplib') - }, 5) + }) .then(done) .catch(done) @@ -552,7 +552,7 @@ describe('Plugin', () => { .assertSomeTraces(traces => { expect(traces[0][0]).to.have.property('service', 'test-custom-service') expect(traces[0][0]).to.have.property('resource', `queue.declare ${queue}`) - }, 2) + }) .then(done) .catch(done) diff --git a/packages/datadog-plugin-confluentinc-kafka-javascript/src/index.js b/packages/datadog-plugin-confluentinc-kafka-javascript/src/index.js index 6850956d354..4b28530e72a 100644 --- a/packages/datadog-plugin-confluentinc-kafka-javascript/src/index.js +++ b/packages/datadog-plugin-confluentinc-kafka-javascript/src/index.js @@ -6,7 +6,13 @@ const BatchConsumerPlugin = require('./batch-consumer') const KafkajsPlugin = require('../../datadog-plugin-kafkajs/src/index') class ConfluentKafkaJsPlugin extends KafkajsPlugin { + /** + * @override + */ static id = 'confluentinc-kafka-javascript' + /** + * @override + */ static get plugins () { return { producer: ProducerPlugin, diff --git a/packages/datadog-plugin-cypress/src/cypress-plugin.js b/packages/datadog-plugin-cypress/src/cypress-plugin.js index 9ec14ecf8b9..56387acd0d8 100644 --- a/packages/datadog-plugin-cypress/src/cypress-plugin.js +++ b/packages/datadog-plugin-cypress/src/cypress-plugin.js @@ -389,7 +389,6 @@ class CypressPlugin { getTestSpan ({ testName, testSuite, isUnskippable, isForcedToRun, testSourceFile, isDisabled, isQuarantined }) { const testSuiteTags = { - [TEST_COMMAND]: this.command, [TEST_COMMAND]: this.command, [TEST_MODULE]: TEST_FRAMEWORK_NAME } diff --git a/packages/datadog-plugin-cypress/test/app-10/tsconfig.json b/packages/datadog-plugin-cypress/test/app-10/tsconfig.json new file mode 100644 index 
00000000000..0967ef424bc --- /dev/null +++ b/packages/datadog-plugin-cypress/test/app-10/tsconfig.json @@ -0,0 +1 @@ +{} diff --git a/packages/datadog-plugin-cypress/test/app/tsconfig.json b/packages/datadog-plugin-cypress/test/app/tsconfig.json new file mode 100644 index 00000000000..0967ef424bc --- /dev/null +++ b/packages/datadog-plugin-cypress/test/app/tsconfig.json @@ -0,0 +1 @@ +{} diff --git a/packages/datadog-plugin-moleculer/test/integration-test/client.spec.js b/packages/datadog-plugin-moleculer/test/integration-test/client.spec.js index 127aa7fd988..a431b11fee0 100644 --- a/packages/datadog-plugin-moleculer/test/integration-test/client.spec.js +++ b/packages/datadog-plugin-moleculer/test/integration-test/client.spec.js @@ -22,7 +22,7 @@ describe('esm', () => { }) after(async () => { - await sandbox.remove() + await sandbox?.remove() }) beforeEach(async () => { diff --git a/packages/datadog-plugin-mongodb-core/src/index.js b/packages/datadog-plugin-mongodb-core/src/index.js index ef4ec9641ca..6d7a5ca89eb 100644 --- a/packages/datadog-plugin-mongodb-core/src/index.js +++ b/packages/datadog-plugin-mongodb-core/src/index.js @@ -10,8 +10,14 @@ class MongodbCorePlugin extends DatabasePlugin { static component = 'mongodb' // avoid using db.name for peer.service since it includes the collection name // should be removed if one day this will be fixed + /** + * @override + */ static peerServicePrecursors = [] + /** + * @override + */ configure (config) { super.configure(config) @@ -54,11 +60,18 @@ class MongodbCorePlugin extends DatabasePlugin { return ctx.currentStore } + /** + * @override + */ getPeerService (tags) { - const ns = tags['db.name'] + let ns = tags['db.name'] if (ns && tags['peer.service'] === undefined) { + const dotIndex = ns.indexOf('.') + if (dotIndex !== -1) { + ns = ns.slice(0, dotIndex) + } // the mongo ns is either dbName either dbName.collection. 
So we keep the first part - tags['peer.service'] = ns.split('.', 1)[0] + tags['peer.service'] = ns } return super.getPeerService(tags) } @@ -114,13 +127,13 @@ function getQuery (cmd) { } function getResource (plugin, ns, query, operationName) { - const parts = [operationName, ns] + let resource = `${operationName} ${ns}` if (plugin.config.queryInResourceName && query) { - parts.push(query) + resource += ` ${query}` } - return parts.join(' ') + return resource } function truncate (input) { diff --git a/packages/datadog-plugin-mysql/test/index.spec.js b/packages/datadog-plugin-mysql/test/index.spec.js index e1cc3546749..21e249b06ea 100644 --- a/packages/datadog-plugin-mysql/test/index.spec.js +++ b/packages/datadog-plugin-mysql/test/index.spec.js @@ -315,7 +315,7 @@ describe('Plugin', () => { let remapStub before(async () => { - await agent.load('mysql', [{ dbmPropagationMode: 'service', service: 'serviced' }]) + await agent.load('mysql', { dbmPropagationMode: 'service', service: 'serviced' }) mysql = proxyquire(`../../../versions/mysql@${version}`, {}).get() connection = mysql.createConnection({ @@ -387,7 +387,7 @@ describe('Plugin', () => { let connection before(async () => { - await agent.load('mysql', [{ dbmPropagationMode: 'service', service: 'serviced' }]) + await agent.load('mysql', { dbmPropagationMode: 'service', service: 'serviced' }) mysql = proxyquire(`../../../versions/mysql@${version}`, {}).get() connection = mysql.createConnection({ @@ -437,7 +437,7 @@ describe('Plugin', () => { }) beforeEach(async () => { - await agent.load('mysql', [{ dbmPropagationMode: 'service', service: '~!@#$%^&*()_+|??/<>' }]) + await agent.load('mysql', { dbmPropagationMode: 'service', service: '~!@#$%^&*()_+|??/<>' }) mysql = proxyquire(`../../../versions/mysql@${version}`, {}).get() connection = mysql.createConnection({ @@ -471,7 +471,7 @@ describe('Plugin', () => { }) beforeEach(async () => { - await agent.load('mysql', [{ dbmPropagationMode: 'full', service: 'post' }]) + 
await agent.load('mysql', { dbmPropagationMode: 'full', service: 'post' }) mysql = proxyquire(`../../../versions/mysql@${version}`, {}).get() connection = mysql.createConnection({ @@ -551,7 +551,7 @@ describe('Plugin', () => { }) beforeEach(async () => { - await agent.load('mysql', [{ dbmPropagationMode: 'full', service: 'post' }]) + await agent.load('mysql', { dbmPropagationMode: 'full', service: 'post' }) mysql = proxyquire(`../../../versions/mysql@${version}`, {}).get() pool = mysql.createPool({ diff --git a/packages/dd-trace/src/appsec/waf/index.js b/packages/dd-trace/src/appsec/waf/index.js index a2035073f73..8c8fc935e65 100644 --- a/packages/dd-trace/src/appsec/waf/index.js +++ b/packages/dd-trace/src/appsec/waf/index.js @@ -9,7 +9,38 @@ const { ASM } = require('../../standalone/product') const web = require('../../plugins/util/web') const { updateRateLimitedMetric } = require('../telemetry') +/** + * Types for WAF public API. + * + * @typedef {import('http').IncomingMessage} IncomingMessage + * @typedef {import('./waf_manager')} WAFManagerCtor + * @typedef {import('./waf_manager')} WAFManagerInstance + * @typedef {import('./waf_manager').WafConfig} WafConfig + * + * @typedef {{ + * persistent?: Record | null, + * ephemeral?: Record | null + * }} WafRunPayload + * + * @typedef {{ keep?: boolean } & Record} WafRunResult + * + * @typedef {{ + * wafManager: WAFManagerInstance | null, + * init: (rules: object, config: WafConfig) => void, + * destroy: () => void, + * updateConfig: (product: string, configId: string, configPath: string, config: object) => void, + * removeConfig: (configPath: string) => void, + * checkAsmDdFallback: () => void, + * run: (data: WafRunPayload, req?: IncomingMessage, raspRule?: string) => WafRunResult | void, + * disposeContext: (req: IncomingMessage) => void, + * WafUpdateError: typeof WafUpdateError + * }} WafAPI + */ + class WafUpdateError extends Error { + /** + * @param {object} diagnosticErrors + */ constructor (diagnosticErrors) 
{ super('WafUpdateError') this.name = 'WafUpdateError' @@ -19,6 +50,7 @@ class WafUpdateError extends Error { let limiter = new Limiter(100) +/** @type {Partial} */ const waf = { wafManager: null, init, @@ -26,11 +58,18 @@ const waf = { updateConfig, removeConfig, checkAsmDdFallback, - run: noop, - disposeContext: noop, + run: /** @type {WafAPI['run']} */ noop, + disposeContext: /** @type {WafAPI['disposeContext']} */ noop, WafUpdateError } +/** + * Initialize the WAF with provided rules and configuration. + * + * @param {object} rules + * @param {WafConfig} config + * @returns {void} + */ function init (rules, config) { destroy() @@ -41,10 +80,13 @@ function init (rules, config) { waf.wafManager = new WAFManager(rules, config) + /** @type {WafAPI['run']} */ waf.run = run + /** @type {WafAPI['disposeContext']} */ waf.disposeContext = disposeContext } +/** @returns {void} */ function destroy () { if (waf.wafManager) { waf.wafManager.destroy() @@ -65,19 +107,28 @@ function checkAsmDdFallback () { } } +/** + * @param {string} product + * @param {string} configId + * @param {string} configPath + * @param {object} config + * @returns {void} + */ function updateConfig (product, configId, configPath, config) { if (!waf.wafManager) throw new Error('Cannot update disabled WAF') try { + const wm = /** @type {import('./waf_manager')} */ (waf.wafManager) if (product === 'ASM_DD') { - waf.wafManager.removeConfig(waf.wafManager.constructor.defaultWafConfigPath) + // defaultWafConfigPath is a static on the WAFManager class + wm.removeConfig(wm.constructor.defaultWafConfigPath) } - const updateSucceeded = waf.wafManager.updateConfig(configPath, config) - Reporter.reportWafConfigUpdate(product, configId, waf.wafManager.ddwaf.diagnostics, waf.wafManager.ddwafVersion) + const updateSucceeded = wm.updateConfig(configPath, config) + Reporter.reportWafConfigUpdate(product, configId, wm.ddwaf.diagnostics, wm.ddwafVersion) if (!updateSucceeded) { - throw new 
WafUpdateError(waf.wafManager.ddwaf.diagnostics) + throw new WafUpdateError(wm.ddwaf.diagnostics) } } catch (err) { log.error('[ASM] Could not update config from RC') @@ -85,17 +136,33 @@ function updateConfig (product, configId, configPath, config) { } } +/** + * @param {string} configPath + * @returns {void} + */ function removeConfig (configPath) { if (!waf.wafManager) throw new Error('Cannot update disabled WAF') try { - waf.wafManager.removeConfig(configPath) + const wm = /** @type {import('./waf_manager')} */ (waf.wafManager) + wm.removeConfig(configPath) } catch (err) { log.error('[ASM] Could not remove config from RC') throw err } } +/** + * Execute the WAF for the given payload and request. + * + * When no request is provided, attempts to use the current store's `req`. + * If the result indicates the trace should be kept (result.keep), applies ASM sampling behavior. + * + * @param {WafRunPayload} data + * @param {IncomingMessage=} req + * @param {string=} raspRule + * @returns {WafRunResult | undefined} + */ function run (data, req, raspRule) { if (!req) { const store = storage('legacy').getStore() @@ -107,7 +174,7 @@ function run (data, req, raspRule) { req = store.req } - const wafContext = waf.wafManager.getWAFContext(req) + const wafContext = /** @type {import('./waf_manager')} */ (waf.wafManager).getWAFContext(req) const result = wafContext.run(data, raspRule) if (result?.keep) { @@ -122,8 +189,14 @@ function run (data, req, raspRule) { return result } +/** + * Dispose the WAF context for the given request. 
+ * + * @param {IncomingMessage} req + * @returns {void} + */ function disposeContext (req) { - const wafContext = waf.wafManager.getWAFContext(req) + const wafContext = /** @type {import('./waf_manager')} */ (waf.wafManager).getWAFContext(req) if (wafContext && !wafContext.ddwafContext.disposed) { wafContext.dispose() diff --git a/packages/dd-trace/src/appsec/waf/waf_manager.js b/packages/dd-trace/src/appsec/waf/waf_manager.js index 05c77272526..392038ddbe0 100644 --- a/packages/dd-trace/src/appsec/waf/waf_manager.js +++ b/packages/dd-trace/src/appsec/waf/waf_manager.js @@ -1,5 +1,44 @@ 'use strict' +/** + * @typedef {object} WafConfig + * @property {number} wafTimeout + * @property {number} rateLimit + * @property {RegExp | undefined} obfuscatorKeyRegex + * @property {RegExp | undefined} obfuscatorValueRegex + */ + +/** + * @typedef {object} DDWAFDiagnostics + * @property {string | undefined} ruleset_version + * @property {unknown} [rules] + */ + +/** + * @typedef {object} DDWAF + * @property {DDWAFDiagnostics} diagnostics + * @property {Set} knownAddresses + * @property {string[]} [configPaths] + * @property {(rules: object, path: string) => boolean} createOrUpdateConfig + * @property {(path: string) => void} removeConfig + * @property {() => void} dispose + * @property {() => DDWAFContext} createContext + */ + +/** + * @typedef {object} DDWAFContext + * @property {boolean} [disposed] + * @property {(payload: object, timeout: number) => object} run + * @property {() => void} dispose + */ + +/** @typedef {import('./waf_context_wrapper')} WAFContextWrapperCtor */ +/** @typedef {InstanceType} WAFContextWrapperInstance */ + +/** + * Thin manager around native DDWAF providing lifecycle and per-request context handling. 
+ */ + const log = require('../../log') const Reporter = require('../reporter') const WAFContextWrapper = require('./waf_context_wrapper') @@ -7,22 +46,37 @@ const WAFContextWrapper = require('./waf_context_wrapper') const contexts = new WeakMap() class WAFManager { + /** @type {string} */ static defaultWafConfigPath = 'datadog/00/ASM_DD/default/config' + /** + * @param {object} rules + * @param {WafConfig} config + */ constructor (rules, config) { + /** @type {WafConfig} */ this.config = config + /** @type {number} */ this.wafTimeout = config.wafTimeout + /** @type {DDWAF} */ this.ddwaf = this._loadDDWAF(rules) + /** @type {string | undefined} */ this.rulesVersion = this.ddwaf.diagnostics.ruleset_version + /** @type {object} */ this.defaultRules = rules Reporter.reportWafInit(this.ddwafVersion, this.rulesVersion, this.ddwaf.diagnostics.rules, true) } + /** + * @param {object} rules + * @returns {DDWAF} + */ _loadDDWAF (rules) { try { // require in `try/catch` because this can throw at require time const { DDWAF } = require('@datadog/native-appsec') + /** @type {string} */ this.ddwafVersion = DDWAF.version() const { obfuscatorKeyRegex, obfuscatorValueRegex } = this.config @@ -37,6 +91,10 @@ class WAFManager { } } + /** + * @param {object} req + * @returns {WAFContextWrapperInstance} + */ getWAFContext (req) { let wafContext = contexts.get(req) @@ -54,29 +112,41 @@ class WAFManager { return wafContext } + /** @returns {void} */ setRulesVersion () { if (this.ddwaf.diagnostics.ruleset_version) { this.rulesVersion = this.ddwaf.diagnostics.ruleset_version } } + /** @returns {void} */ setAsmDdFallbackConfig () { if (!this.ddwaf.configPaths.some(cp => cp.includes('ASM_DD'))) { this.updateConfig(WAFManager.defaultWafConfigPath, this.defaultRules) } } + /** + * @param {string} path + * @param {object} rules + * @returns {boolean} + */ updateConfig (path, rules) { const updateResult = this.ddwaf.createOrUpdateConfig(rules, path) this.setRulesVersion() return updateResult } + 
/** + * @param {string} path + * @returns {void} + */ removeConfig (path) { this.ddwaf.removeConfig(path) this.setRulesVersion() } + /** @returns {void} */ destroy () { if (this.ddwaf) { this.ddwaf.dispose() diff --git a/packages/dd-trace/src/config-helper.js b/packages/dd-trace/src/config-helper.js index ea9ca30ff43..5fbf351b1dd 100644 --- a/packages/dd-trace/src/config-helper.js +++ b/packages/dd-trace/src/config-helper.js @@ -5,6 +5,13 @@ const { deprecate } = require('util') const { supportedConfigurations, aliases, deprecations } = require('./supported-configurations.json') +/** + * Types for environment variable handling. + * + * @typedef {keyof typeof supportedConfigurations} SupportedEnvKey + * @typedef {Partial & Partial>} TracerEnv + */ + const aliasToCanonical = {} for (const canonical of Object.keys(aliases)) { for (const alias of aliases[canonical]) { @@ -32,7 +39,7 @@ module.exports = { * Returns the environment variables that are supported by the tracer * (including all non-Datadog/OTEL specific environment variables) * - * @returns {Partial} The environment variables + * @returns {TracerEnv} The environment variables */ getEnvironmentVariables () { const configs = {} diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index e7aafdd5bb7..782c0dc05c6 100644 --- a/packages/dd-trace/src/config.js +++ b/packages/dd-trace/src/config.js @@ -342,9 +342,9 @@ class Config { '' )) - const DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH = coalesce( - getEnvironmentVariable('DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH'), - options.cloudPayloadTagging?.maxDepth, + const DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH = Number( + getEnvironmentVariable('DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH') ?? + options.cloudPayloadTagging?.maxDepth ?? 
10 ) diff --git a/packages/dd-trace/src/datastreams/index.js b/packages/dd-trace/src/datastreams/index.js index 1704e0ea127..4d37b6ed95f 100644 --- a/packages/dd-trace/src/datastreams/index.js +++ b/packages/dd-trace/src/datastreams/index.js @@ -11,7 +11,14 @@ const { // plugins instead of having dedicated DSM plugins that are themselves // lazy loaded. // -// TODO: Remove this when DSM has been moved to dedicaed plugins. +// TODO: Remove this when DSM has been moved to dedicated plugins. +/** + * @template T extends new (...args: any[]) => any + * @param {() => T} classGetter + * @param {string[]} methods + * @param {string[]} staticMethods + * @returns {T} + */ function lazyClass (classGetter, methods = [], staticMethods = []) { let constructorArgs let ActiveClass @@ -50,22 +57,34 @@ function lazyClass (classGetter, methods = [], staticMethods = []) { return LazyClass } +/** + * @type {typeof import('./pathway').DsmPathwayCodec} + */ const DsmPathwayCodec = lazyClass(() => require('./pathway').DsmPathwayCodec, [], [ 'encode', 'decode' ]) +/** + * @type {typeof import('./checkpointer').DataStreamsCheckpointer} + */ const DataStreamsCheckpointer = lazyClass(() => require('./checkpointer').DataStreamsCheckpointer, [ 'setProduceCheckpoint', 'setConsumeCheckpoint' ]) +/** + * @type {typeof import('./manager').DataStreamsManager} + */ const DataStreamsManager = lazyClass(() => require('./manager').DataStreamsManager, [ 'setCheckpoint', 'decodeDataStreamsContext' ]) // TODO: Are all those methods actually public? 
+/** + * @type {typeof import('./processor').DataStreamsProcessor} + */ const DataStreamsProcessor = lazyClass(() => require('./processor').DataStreamsProcessor, [ 'onInterval', 'bucketFromTimestamp', @@ -79,6 +98,9 @@ const DataStreamsProcessor = lazyClass(() => require('./processor').DataStreamsP 'getSchema' ]) +/** + * @type {typeof import('./schemas/schema_builder').SchemaBuilder} + */ const SchemaBuilder = lazyClass(() => require('./schemas/schema_builder').SchemaBuilder, [ 'build', 'addProperty', diff --git a/packages/dd-trace/src/llmobs/plugins/ai/index.js b/packages/dd-trace/src/llmobs/plugins/ai/index.js index c040d1a98ed..0d4c9dc8091 100644 --- a/packages/dd-trace/src/llmobs/plugins/ai/index.js +++ b/packages/dd-trace/src/llmobs/plugins/ai/index.js @@ -80,7 +80,7 @@ class VercelAILLMObsPlugin extends BaseLLMObsPlugin { * We use the tool description as the next best identifier for a tool. * * @param {string} toolDescription - * @returns {string} + * @returns {string | undefined} */ findToolName (toolDescription) { for (const availableTool of this.#availableTools) { @@ -91,6 +91,9 @@ class VercelAILLMObsPlugin extends BaseLLMObsPlugin { } } + /** + * @override + */ getLLMObsSpanRegisterOptions (ctx) { const span = ctx.currentStore?.span const operation = getOperation(span) @@ -100,6 +103,9 @@ class VercelAILLMObsPlugin extends BaseLLMObsPlugin { return { kind, name: getLlmObsSpanName(operation, ctx.attributes['ai.telemetry.functionId']) } } + /** + * @override + */ setLLMObsTags (ctx) { const span = ctx.currentStore?.span if (!span) return @@ -212,6 +218,10 @@ class VercelAILLMObsPlugin extends BaseLLMObsPlugin { this._tagger.tagMetadata(span, metadata) } + /** + * @param {import('../../../opentracing/span')} span + * @param {Record} tags + */ setLLMOperationTags (span, tags) { const toolsForModel = tags['ai.prompt.tools']?.map(getJsonStringValue) diff --git a/packages/dd-trace/src/llmobs/plugins/ai/util.js b/packages/dd-trace/src/llmobs/plugins/ai/util.js 
index d8ae5fc4dce..0a94e4a33c9 100644 --- a/packages/dd-trace/src/llmobs/plugins/ai/util.js +++ b/packages/dd-trace/src/llmobs/plugins/ai/util.js @@ -34,7 +34,7 @@ function getSpanTags (ctx) { * getOperation(span) // 'doGenerate' * * @param {import('../../../opentracing/span')} span - * @returns {string} + * @returns {string | undefined} */ function getOperation (span) { const name = span._name @@ -45,8 +45,9 @@ function getOperation (span) { /** * Get the LLM token usage from the span tags - * @param {Record} tags - * @returns {{inputTokens: number, outputTokens: number, totalTokens: number}} + * @template T extends {inputTokens: number, outputTokens: number, totalTokens: number} + * @param {T} tags + * @returns {Pick} */ function getUsage (tags) { const usage = {} @@ -64,9 +65,10 @@ function getUsage (tags) { /** * Safely JSON parses a string value with a default fallback + * @template T typeof defaultValue * @param {string} str - * @param {any} defaultValue - * @returns {Record | string | Array} + * @param {T} defaultValue + * @returns {Record | string | Array | null | T} */ function getJsonStringValue (str, defaultValue) { let maybeValue = defaultValue @@ -81,7 +83,7 @@ function getJsonStringValue (str, defaultValue) { /** * Get the model metadata from the span tags (top_p, top_k, temperature, etc.) 
- * @param {import('../../../opentracing/span')} span + * @param {Record} tags * @returns {Record | null} */ function getModelMetadata (tags) { diff --git a/packages/dd-trace/src/noop/proxy.js b/packages/dd-trace/src/noop/proxy.js index 0b83e60e7b7..236b4c76174 100644 --- a/packages/dd-trace/src/noop/proxy.js +++ b/packages/dd-trace/src/noop/proxy.js @@ -10,6 +10,7 @@ const noopAppsec = new NoopAppsecSdk() const noopDogStatsDClient = new NoopDogStatsDClient() const noopLLMObs = new NoopLLMObsSDK(noop) +/** @type {import('../../src/index')} Proxy */ class NoopProxy { constructor () { this._tracer = noop diff --git a/packages/dd-trace/src/opentracing/span.js b/packages/dd-trace/src/opentracing/span.js index 3199f9bcf16..41c0988c82f 100644 --- a/packages/dd-trace/src/opentracing/span.js +++ b/packages/dd-trace/src/opentracing/span.js @@ -146,7 +146,7 @@ class DatadogSpan { } /** - * @returns {DatadogSpanContext} + * @returns {import('../priority_sampler').DatadogSpanContext} */ context () { return this._spanContext diff --git a/packages/dd-trace/src/payload-tagging/config/index.js b/packages/dd-trace/src/payload-tagging/config/index.js index 318e6b47247..f8a118aef84 100644 --- a/packages/dd-trace/src/payload-tagging/config/index.js +++ b/packages/dd-trace/src/payload-tagging/config/index.js @@ -3,6 +3,14 @@ const aws = require('./aws.json') const sdks = { aws } +/** + * Builds rules per service for a given SDK, appending user-provided rules. + * + * @param {Record} sdk + * @param {string[]} requestInput + * @param {string[]} responseInput + * @returns {Record} + */ function getSDKRules (sdk, requestInput, responseInput) { const sdkServiceRules = {} for (const [service, serviceRules] of Object.entries(sdk)) { @@ -17,6 +25,14 @@ function getSDKRules (sdk, requestInput, responseInput) { return sdkServiceRules } +/** + * Appends input rules to all supported SDKs and returns a structure mapping SDK + * names to per-service rules. 
+ * + * @param {string[]} [requestInput=[]] + * @param {string[]} [responseInput=[]] + * @returns {Record>} + */ function appendRules (requestInput = [], responseInput = []) { const sdkRules = {} for (const [name, sdk] of Object.entries(sdks)) { diff --git a/packages/dd-trace/src/payload-tagging/index.js b/packages/dd-trace/src/payload-tagging/index.js index b80ce08d29a..0c004939f53 100644 --- a/packages/dd-trace/src/payload-tagging/index.js +++ b/packages/dd-trace/src/payload-tagging/index.js @@ -14,8 +14,8 @@ const { tagsFromObject } = require('./tagging') /** * Given an identified value, attempt to parse it as JSON if relevant * - * @param {any} value - * @returns {any} the parsed object if parsing was successful, the input if not + * @param {unknown} value + * @returns {unknown} the parsed object if parsing was successful, the input if not */ function maybeJSONParseValue (value) { if (typeof value !== 'string' || value[0] !== '{') { @@ -32,8 +32,8 @@ function maybeJSONParseValue (value) { /** * Apply expansion to all expansion JSONPath queries * - * @param {Object} object - * @param {[String]} expansionRules list of JSONPath queries + * @param {Record} object + * @param {string[]} expansionRules list of JSONPath queries */ function expand (object, expansionRules) { for (const rule of expansionRules) { @@ -46,8 +46,8 @@ function expand (object, expansionRules) { /** * Apply redaction to all redaction JSONPath queries * - * @param {Object} object - * @param {[String]} redactionRules + * @param {Record} object + * @param {string[]} redactionRules */ function redact (object, redactionRules) { for (const rule of redactionRules) { @@ -65,15 +65,10 @@ function redact (object, redactionRules) { * as there are leaf values in the object * This function performs side-effects on a _copy_ of the input object. 
* - * @param {Object} config sdk configuration for the service - * @param {[String]} config.expand expansion rules for the service - * @param {[String]} config.request redaction rules for the request - * @param {[String]} config.response redaction rules for the response - * @param {Object} object the input object to generate tags from - * @param {Object} opts tag generation options - * @param {String} opts.prefix prefix for all generated tags - * @param {number} opts.maxDepth maximum depth to traverse the object - * @returns + * @param {{ expand: string[], request: string[], response: string[] }} config sdk configuration for the service + * @param {Record} object the input object to generate tags from + * @param {{ prefix: string, maxDepth: number }} opts tag generation options + * @returns {Record} Tags map */ function computeTags (config, object, opts) { const payload = rfdc(object) @@ -84,10 +79,26 @@ function computeTags (config, object, opts) { return tagsFromObject(payload, opts) } +/** + * Compute request tags with the request prefix. + * + * @param {{ expand: string[], request: string[], response: string[] }} config + * @param {Record} object + * @param {{ maxDepth: number }} opts + * @returns {Record} + */ function tagsFromRequest (config, object, opts) { return computeTags(config, object, { ...opts, prefix: PAYLOAD_TAG_REQUEST_PREFIX }) } +/** + * Compute response tags with the response prefix. 
+ * + * @param {{ expand: string[], request: string[], response: string[] }} config + * @param {Record} object + * @param {{ maxDepth: number }} opts + * @returns {Record} + */ function tagsFromResponse (config, object, opts) { return computeTags(config, object, { ...opts, prefix: PAYLOAD_TAG_RESPONSE_PREFIX }) } diff --git a/packages/dd-trace/src/payload-tagging/tagging.js b/packages/dd-trace/src/payload-tagging/tagging.js index 8e31ae40ee3..ca9fe184964 100644 --- a/packages/dd-trace/src/payload-tagging/tagging.js +++ b/packages/dd-trace/src/payload-tagging/tagging.js @@ -8,24 +8,33 @@ const redactedKeys = new Set([ const truncated = 'truncated' const redacted = 'redacted' +/** + * Escapes dots in keys to preserve hierarchy in flattened tag names. + * + * @param {string} key + * @returns {string} + */ function escapeKey (key) { return key.replaceAll('.', String.raw`\.`) } /** - * Compute normalized payload tags from any given object. - * - * @param {object} object - * @param {import('./mask').Mask} mask - * @param {number} maxDepth - * @param {string} prefix - * @returns - */ + * Compute normalized payload tags from any given object. 
+ * + * - Limits total tag count to `PAYLOAD_TAGGING_MAX_TAGS - 1` plus the `_dd.payload_tags_incomplete` flag + * - Truncates values at max depth and for large scalars + * - Redacts known sensitive keys + * + * @param {unknown} object - Input to flatten into tags + * @param {{ maxDepth: number, prefix: string }} opts - Traversal options + * @returns {Record} Map of tag names to values + */ function tagsFromObject (object, opts) { const { maxDepth, prefix } = opts let tagCount = 0 let abort = false + /** @type {Record} */ const result = {} function tagRec (prefix, object, depth = 0) { diff --git a/packages/dd-trace/src/pkg.js b/packages/dd-trace/src/pkg.js index e6b5f03259d..2f96d82fc7f 100644 --- a/packages/dd-trace/src/pkg.js +++ b/packages/dd-trace/src/pkg.js @@ -1,7 +1,7 @@ 'use strict' -const fs = require('fs') -const path = require('path') +const fs = require('node:fs') +const path = require('node:path') function findRoot () { return require.main && require.main.filename @@ -20,8 +20,10 @@ function findPkg () { const filePath = findUp('package.json', root, directory) + if (filePath === undefined) return {} + try { - return JSON.parse(fs.readFileSync(filePath, 'utf8')) + return require(filePath) } catch { return {} } diff --git a/packages/dd-trace/src/plugins/composite.js b/packages/dd-trace/src/plugins/composite.js index a3b422a9f1d..0fd2f5739eb 100644 --- a/packages/dd-trace/src/plugins/composite.js +++ b/packages/dd-trace/src/plugins/composite.js @@ -11,6 +11,9 @@ class CompositePlugin extends Plugin { } } + /** + * @override + */ configure (config) { super.configure(config) for (const name in this.constructor.plugins) { diff --git a/packages/dd-trace/src/plugins/plugin.js b/packages/dd-trace/src/plugins/plugin.js index 8fb26dceff5..7401e8a5e3b 100644 --- a/packages/dd-trace/src/plugins/plugin.js +++ b/packages/dd-trace/src/plugins/plugin.js @@ -6,6 +6,24 @@ const dc = require('dc-polyfill') const logger = require('../log') const { storage } = 
require('../../../datadog-core') +/** + * Base class for all Datadog plugins. + * + * Subclasses MUST define a static field `id` with the integration identifier + * used across channels, span names, tags and telemetry. + * + * Example: + * ```js + * class MyPlugin extends Plugin { + * static id = 'myframework' + * } + * ``` + * + * Notes about the tracer instance: + * - In some contexts the tracer may be wrapped and available as `{ _tracer: Tracer }`. + * Use the `tracer` getter which normalizes access. + */ + class Subscription { constructor (event, handler) { this._channel = dc.channel(event) @@ -50,6 +68,12 @@ class StoreBinding { } module.exports = class Plugin { + /** + * Create a new plugin instance. + * + * @param {object} tracer Tracer instance or wrapper containing it under `_tracer`. + * @param {object} tracerConfig Global tracer configuration object. + */ constructor (tracer, tracerConfig) { this._subscriptions = [] this._bindings = [] @@ -59,10 +83,22 @@ module.exports = class Plugin { this._tracerConfig = tracerConfig // global tracer configuration } + /** + * Normalized tracer access. Returns the underlying tracer even if wrapped. + * + * @returns {object} + */ get tracer () { return this._tracer?._tracer || this._tracer } + /** + * Enter a context with the provided span bound in storage. + * + * @param {object} span The span to bind as current. + * @param {object=} store Optional existing store to extend; if omitted, uses current store. + * @returns {void} + */ enter (span, store) { store = store || storage('legacy').getStore() storage('legacy').enterWith({ ...store, span }) @@ -74,8 +110,19 @@ module.exports = class Plugin { storage('legacy').enterWith({ noop: true }) } + /** + * Subscribe to a diagnostic channel with automatic error handling and enable/disable lifecycle. + * + * @param {string} channelName Diagnostic channel name. + * @param {(...args: unknown[]) => unknown} handler Handler invoked on messages. 
+ * @returns {void} + */ addSub (channelName, handler) { const plugin = this + /** + * @this {unknown} + * @returns {unknown} + */ const wrappedHandler = function () { try { return handler.apply(this, arguments) @@ -88,10 +135,23 @@ module.exports = class Plugin { this._subscriptions.push(new Subscription(channelName, wrappedHandler)) } + /** + * Bind the tracer store to a diagnostic channel with a transform function. + * + * @param {string} channelName Diagnostic channel name. + * @param {(data: unknown) => object} transform Transform to compute the bound store. + * @returns {void} + */ addBind (channelName, transform) { this._bindings.push(new StoreBinding(channelName, transform)) } + /** + * Attach an error to the current active span (if any). + * + * @param {unknown} error Error object or sentinel value. + * @returns {void} + */ addError (error) { const store = storage('legacy').getStore() @@ -102,6 +162,13 @@ module.exports = class Plugin { } } + /** + * Enable or disable the plugin and (re)apply its configuration. + * + * @param {boolean|object} config Either a boolean to enable/disable or a configuration object + * containing at least `{ enabled: boolean }`. 
+ * @returns {void} + */ configure (config) { if (typeof config === 'boolean') { config = { enabled: config } diff --git a/packages/dd-trace/src/plugins/util/git.js b/packages/dd-trace/src/plugins/util/git.js index b93f0fad544..155f63f9194 100644 --- a/packages/dd-trace/src/plugins/util/git.js +++ b/packages/dd-trace/src/plugins/util/git.js @@ -114,7 +114,7 @@ function isShallowRepository () { function getGitVersion () { const gitVersionString = sanitizedExec('git', ['version']) - const gitVersionMatches = gitVersionString.match(/git version (\d+)\.(\d+)\.(\d+)/) + const gitVersionMatches = /** @type {RegExpMatchArray} */ (gitVersionString.match(/git version (\d+)\.(\d+)\.(\d+)/)) try { return { major: Number.parseInt(gitVersionMatches[1]), diff --git a/packages/dd-trace/src/plugins/util/test.js b/packages/dd-trace/src/plugins/util/test.js index c86b9d98a6c..1838863d008 100644 --- a/packages/dd-trace/src/plugins/util/test.js +++ b/packages/dd-trace/src/plugins/util/test.js @@ -49,6 +49,13 @@ const { SAMPLING_RULE_DECISION } = require('../../constants') const { AUTO_KEEP } = require('../../../../../ext/priority') const { version: ddTraceVersion } = require('../../../../../package.json') +/** + * JSDoc types for test environment metadata helpers. 
+ * + * @typedef {{ service?: string, isServiceUserProvided?: boolean }} TestEnvironmentConfig + * @typedef {Record} TestEnvironmentMetadata + */ + // session tags const TEST_SESSION_NAME = 'test_session.name' @@ -331,6 +338,10 @@ function validateUrl (url) { } } +/** + * @param {TestEnvironmentMetadata} metadata + * @returns {TestEnvironmentMetadata} + */ function removeInvalidMetadata (metadata) { return Object.keys(metadata).reduce((filteredTags, tag) => { if (tag === GIT_REPOSITORY_URL && !validateGitRepositoryUrl(metadata[GIT_REPOSITORY_URL])) { @@ -444,6 +455,13 @@ function checkShaDiscrepancies (ciMetadata, userProvidedGitMetadata) { ) } +/** + * Build environment metadata for tests by merging CI, Git, runtime/OS and user-provided metadata. + * + * @param {string=} testFramework + * @param {TestEnvironmentConfig=} config + * @returns {TestEnvironmentMetadata} + */ function getTestEnvironmentMetadata (testFramework, config, shouldSkipGitMetadataExtraction = false) { const ciMetadata = getCIMetadata() const userProvidedGitMetadata = getUserProviderGitMetadata() @@ -479,6 +497,7 @@ function getTestEnvironmentMetadata (testFramework, config, shouldSkipGitMetadat }) } + /** @type {TestEnvironmentMetadata} */ const runtimeAndOSMetadata = getRuntimeAndOSMetadata() const metadata = { diff --git a/packages/dd-trace/src/priority_sampler.js b/packages/dd-trace/src/priority_sampler.js index 0a0c0bbb991..45fae148d16 100644 --- a/packages/dd-trace/src/priority_sampler.js +++ b/packages/dd-trace/src/priority_sampler.js @@ -46,8 +46,9 @@ const defaultSampler = new Sampler(AUTO_KEEP) * @class PrioritySampler * @typedef {import('./opentracing/span')} DatadogSpan * @typedef {import('./opentracing/span_context')} DatadogSpanContext - * @typedef {import('./standalone/product')} PRODUCTS + * @typedef {{ id: number, mechanism?: number }} Product * @typedef {2|-1|1|0} SamplingPriority Empirically defined sampling priorities. 
+ * @typedef {import('./sampling_rule')|Record} SamplingRuleLike */ class PrioritySampler { /** @@ -55,12 +56,12 @@ class PrioritySampler { * * @typedef {Object} SamplingConfig * @property {number} [sampleRate] - The default sample rate for traces. - * @property {string} [provenance] - The provenance of the sampling rule (e.g., "customer", "dynamic"). + * @property {string} [provenance] - Optional rule provenance ("customer" or "dynamic"). * @property {number} [rateLimit=100] - The maximum number of traces to sample per second. - * @property {Array} [rules=[]] - An array of sampling rules to apply. + * @property {Array|Array>} [rules=[]] - Sampling rules or configs. * * @param {string} env - The environment name (e.g., "production", "staging"). - * @param {SamplingConfig} config - The configuration object for sampling. + * @param {SamplingConfig} [config] - The configuration object for sampling. */ constructor (env, config) { this.configure(env, config) @@ -69,22 +70,22 @@ class PrioritySampler { /** * - * @param env {string} - * @param opts {SamplingConfig} + * @param {string} env + * @param {SamplingConfig} config */ - configure (env, opts = {}) { - const { sampleRate, provenance, rateLimit = 100, rules } = opts + configure (env, config = {}) { + const { sampleRate, provenance, rateLimit = 100, rules } = config this._env = env this._rules = this.#normalizeRules(rules || [], sampleRate, rateLimit, provenance) this._limiter = new RateLimiter(rateLimit) - log.trace(env, opts) + log.trace(env, config) setSamplingRules(this._rules) } /** - * @param span {DatadogSpan} - * @returns {boolean} + * @param {DatadogSpan} span + * @returns {boolean} True if the trace should be sampled based on priority. */ isSampled (span) { const priority = this._getPriorityFromAuto(span) @@ -93,9 +94,10 @@ class PrioritySampler { } /** + * Assigns a sampling priority to a span if not already set. 
* - * @param span {DatadogSpan} - * @param auto {boolean} + * @param {DatadogSpan} span + * @param {boolean} [auto=true] - Whether to use automatic sampling if no manual tags are present. * @returns {void} */ sample (span, auto = true) { @@ -125,8 +127,9 @@ class PrioritySampler { } /** + * Updates agent-provided sampling rates keyed by `service:,env:`. * - * @param rates {Record} + * @param {Record} rates * @returns {void} */ update (rates) { @@ -145,8 +148,9 @@ class PrioritySampler { } /** + * Validates that a sampling priority value is one of the allowed constants. * - * @param samplingPriority {SamplingPriority} + * @param {SamplingPriority|undefined} samplingPriority * @returns {boolean} */ validate (samplingPriority) { @@ -162,10 +166,11 @@ class PrioritySampler { } /** + * Explicitly sets the priority and mechanism for the span's trace. * - * @param span {DatadogSpan} - * @param samplingPriority {SamplingPriority} - * @param product {import('./standalone/product')} + * @param {DatadogSpan} span + * @param {SamplingPriority} samplingPriority + * @param {Product} [product] */ setPriority (span, samplingPriority, product) { if (!span || !this.validate(samplingPriority)) return @@ -189,17 +194,21 @@ class PrioritySampler { } /** + * Returns the span context, accepting either a span or a span context. * - * @param span {DatadogSpan} + * @param {DatadogSpan|DatadogSpanContext} span * @returns {DatadogSpanContext} */ _getContext (span) { - return typeof span.context === 'function' ? span.context() : span + return typeof /** @type {DatadogSpan} */ (span).context === 'function' + ? /** @type {DatadogSpan} */ (span).context() + : /** @type {DatadogSpanContext} */ (span) } /** + * Computes priority using rules and agent rates when no manual tag is present. 
* - * @param span {DatadogSpan} + * @param {DatadogSpan} span * @returns {SamplingPriority} */ _getPriorityFromAuto (span) { @@ -212,11 +221,12 @@ class PrioritySampler { } /** - * - * @param tags {Record} + * Computes priority from manual sampling tags if present. * Included for compatibility with {@link import('./standalone/tracesource_priority_sampler')._getPriorityFromTags} - * @param _context {DatadogSpanContext} - * @returns {SamplingPriority} + * + * @param {Record} tags + * @param {DatadogSpanContext} _context + * @returns {SamplingPriority|undefined} */ _getPriorityFromTags (tags, _context) { if (Object.hasOwn(tags, MANUAL_KEEP) && tags[MANUAL_KEEP] !== false) { @@ -224,19 +234,23 @@ class PrioritySampler { } else if (Object.hasOwn(tags, MANUAL_DROP) && tags[MANUAL_DROP] !== false) { return USER_REJECT } - const priority = Number.parseInt(tags[SAMPLING_PRIORITY], 10) - - if (priority === 1 || priority === 2) { - return USER_KEEP - } else if (priority === 0 || priority === -1) { - return USER_REJECT + const rawPriority = tags[SAMPLING_PRIORITY] + if (rawPriority !== undefined) { + const priority = Number.parseInt(String(rawPriority), 10) + + if (priority === 1 || priority === 2) { + return USER_KEEP + } else if (priority === 0 || priority === -1) { + return USER_REJECT + } } } /** + * Applies a matching rule and rate limit to compute the sampling priority. * - * @param context {DatadogSpanContext} - * @param rule {SamplingRule} + * @param {DatadogSpanContext} context + * @param {import('./sampling_rule')} rule * @returns {SamplingPriority} */ #getPriorityByRule (context, rule) { @@ -251,12 +265,14 @@ class PrioritySampler { } /** + * Checks if the rate limiter allows sampling for the current window and + * records the effective rate on the trace. * - * @param context {DatadogSpanContext} + * @param {DatadogSpanContext} context * @returns {boolean} - * @private */ _isSampledByRateLimit (context) { + // TODO: Change underscored properties to private ones. 
const allowed = this._limiter.isAllowed() context._trace[SAMPLING_LIMIT_DECISION] = this._limiter.effectiveRate() @@ -265,12 +281,15 @@ class PrioritySampler { } /** + * Computes priority using agent-provided sampling rates. * - * @param context {DatadogSpanContext} + * @param {DatadogSpanContext} context * @returns {SamplingPriority} */ #getPriorityByAgent (context) { const key = `service:${context._tags[SERVICE_NAME]},env:${this._env}` + // TODO: Change underscored properties to private ones. + // TODO: Add default sampler to the samplers object. const sampler = this._samplers[key] || this._samplers[DEFAULT_KEY] context._trace[SAMPLING_AGENT_DECISION] = sampler.rate() @@ -281,8 +300,9 @@ class PrioritySampler { } /** + * Tags the trace with a decision maker when priority is keep, or removes it otherwise. * - * @param span {DatadogSpan} + * @param {DatadogSpan} span * @returns {void} */ #addDecisionMaker (span) { @@ -301,11 +321,13 @@ class PrioritySampler { } /** - * @param {Record[] | Record} rules - The sampling rules to normalize. - * @param {number} sampleRate + * Normalizes rule inputs to SamplingRule instances, applying defaults. + * + * @param {Array|SamplingRuleLike} rules - Rules to normalize. + * @param {number|undefined} sampleRate * @param {number} rateLimit - * @param {string} provenance - * @returns {SamplingRule[]} + * @param {string|undefined} provenance + * @returns {Array} */ #normalizeRules (rules, sampleRate, rateLimit, provenance) { rules = Array.isArray(rules) ? rules.flat() : [rules] @@ -314,7 +336,7 @@ class PrioritySampler { const result = [] for (const rule of rules) { - const sampleRate = Number.parseFloat(rule.sampleRate) + const sampleRate = Number.parseFloat(String(rule.sampleRate)) // TODO(BridgeAR): Debug logging invalid rules fails our tests. // Should we definitely not know about these? if (!Number.isNaN(sampleRate)) { @@ -325,11 +347,13 @@ class PrioritySampler { } /** + * Finds the first matching rule for the given span. 
* - * @param span {DatadogSpan} - * @returns {SamplingRule|undefined} + * @param {DatadogSpan} span + * @returns {import('./sampling_rule')|undefined} */ #findRule (span) { + // TODO: Change underscored properties to private ones. for (const rule of this._rules) { // Rule is a special object with a .match() property. // It has nothing to do with a regular expression. @@ -339,9 +363,10 @@ class PrioritySampler { } /** + * Convenience helper to keep a trace with an optional product mechanism. * - * @param span {DatadogSpan} - * @param product {import('./standalone/product')} + * @param {DatadogSpan} span + * @param {Product} [product] */ static keepTrace (span, product) { span?._prioritySampler?.setPriority(span, USER_KEEP, product) diff --git a/packages/dd-trace/src/proxy.js b/packages/dd-trace/src/proxy.js index 1fbc744b99b..19b6ebc78a4 100644 --- a/packages/dd-trace/src/proxy.js +++ b/packages/dd-trace/src/proxy.js @@ -88,6 +88,9 @@ class Tracer extends NoopProxy { } } + /** + * @override + */ init (options) { if (this._initialized) return this @@ -261,6 +264,9 @@ class Tracer extends NoopProxy { } } + /** + * @override + */ profilerStarted () { if (!this._profilerStarted) { // injection hardening: this is only ever invoked from tests. @@ -269,11 +275,17 @@ class Tracer extends NoopProxy { return this._profilerStarted } + /** + * @override + */ use () { this._pluginManager.configurePlugin(...arguments) return this } + /** + * @override + */ get TracerProvider () { return require('./opentelemetry/tracer_provider') } diff --git a/packages/dd-trace/src/rate_limiter.js b/packages/dd-trace/src/rate_limiter.js index 3789ffaeb72..baab25288f3 100644 --- a/packages/dd-trace/src/rate_limiter.js +++ b/packages/dd-trace/src/rate_limiter.js @@ -3,14 +3,25 @@ const limiter = require('limiter') class RateLimiter { + /** + * @param {number} rateLimit - Allowed units per interval. Negative means unlimited, 0 disables. 
+ * @param {'second'|'minute'|'hour'|'day'} [interval='second'] - Time window for the limiter. + */ constructor (rateLimit, interval = 'second') { - this._rateLimit = Number.parseInt(rateLimit) + this._rateLimit = Number.parseInt(String(rateLimit)) + // The limiter constructor accepts a token count number and an interval string this._limiter = new limiter.RateLimiter(this._rateLimit, interval) this._tokensRequested = 0 this._prevIntervalTokens = 0 this._prevTokensRequested = 0 } + /** + * Attempts to consume a token and reports whether it was allowed. + * Updates internal counters used for effective rate computation. + * + * @returns {boolean} + */ isAllowed () { const curIntervalStart = this._limiter.curIntervalStart const curIntervalTokens = this._limiter.tokensThisInterval @@ -27,6 +38,12 @@ class RateLimiter { return allowed } + /** + * Returns the fraction of allowed requests over requested ones in the + * current and previous intervals combined. + * + * @returns {number} + */ effectiveRate () { if (this._rateLimit < 0) return 1 if (this._rateLimit === 0) return 0 @@ -38,6 +55,10 @@ class RateLimiter { return allowed / requested } + /** + * Internal token consumption without counter side-effects. + * @returns {boolean} + */ _isAllowed () { if (this._rateLimit < 0) return true if (this._rateLimit === 0) return false @@ -45,6 +66,10 @@ class RateLimiter { return this._limiter.tryRemoveTokens(1) } + /** + * Effective rate within the current interval only. 
+ * @returns {number} + */ _currentWindowRate () { if (this._rateLimit < 0) return 1 if (this._rateLimit === 0) return 0 diff --git a/packages/dd-trace/src/sampling_rule.js b/packages/dd-trace/src/sampling_rule.js index e5b03cb7d94..d76b1d9ad60 100644 --- a/packages/dd-trace/src/sampling_rule.js +++ b/packages/dd-trace/src/sampling_rule.js @@ -4,18 +4,53 @@ const { globMatch } = require('../src/util') const RateLimiter = require('./rate_limiter') const Sampler = require('./sampler') +/** + * Typedefs for clarity when matching spans. + * + * @typedef {import('./opentracing/span')} DatadogSpan + * @typedef {import('./opentracing/span_context')} DatadogSpanContext + * + * @callback Locator + * A function that derives a string subject from a span. + * @param {DatadogSpan} span + * @returns {string|undefined} + * + * @typedef {object} RuleMatcher + * @property {(span: DatadogSpan) => boolean} match - Returns true if the span matches. + */ + +/** + * Matcher that always returns true. + * Implements the minimal `RuleMatcher` interface. + * @implements {RuleMatcher} + */ class AlwaysMatcher { - match () { + /** + * @param {DatadogSpan} span + * @returns {boolean} + */ + match (span) { return true } } +/** + * Matcher that evaluates a glob pattern against a derived subject. + */ class GlobMatcher { + /** + * @param {string} pattern - Glob pattern used to match the subject. + * @param {Locator} locator - Function extracting the subject to match. + */ constructor (pattern, locator) { this.pattern = pattern this.locator = locator } + /** + * @param {DatadogSpan} span + * @returns {boolean} + */ match (span) { const subject = this.locator(span) if (!subject) return false @@ -23,12 +58,23 @@ class GlobMatcher { } } +/** + * Matcher that evaluates a regular expression against a derived subject. + */ class RegExpMatcher { + /** + * @param {RegExp} pattern - Regular expression used to test the subject. + * @param {Locator} locator - Function extracting the subject to test. 
+ */ constructor (pattern, locator) { this.pattern = pattern this.locator = locator } + /** + * @param {DatadogSpan} span + * @returns {boolean} + */ match (span) { const subject = this.locator(span) if (!subject) return false @@ -36,6 +82,15 @@ class RegExpMatcher { } } +/** + * Creates a matcher for the provided pattern and locator. + * Returns a glob matcher for non-trivial strings, a regexp matcher for RegExp, + * or an always-true matcher for wildcard or missing patterns. + * + * @param {string|RegExp|undefined} pattern + * @param {Locator} locator + * @returns {RuleMatcher} + */ function matcher (pattern, locator) { if (pattern instanceof RegExp) { return new RegExpMatcher(pattern, locator) @@ -47,14 +102,32 @@ function matcher (pattern, locator) { return new AlwaysMatcher() } +/** + * Creates a locator that reads a specific tag from the span context. + * + * @param {string} tag + * @returns {Locator} + */ function makeTagLocator (tag) { return (span) => span.context()._tags[tag] } +/** + * Extracts the operation name from the span context. + * + * @param {DatadogSpan} span + * @returns {string|undefined} + */ function nameLocator (span) { return span.context()._name } +/** + * Extracts the service name from the span context or tracer configuration. + * + * @param {DatadogSpan} span + * @returns {string|undefined} + */ function serviceLocator (span) { const { _tags: tags } = span.context() return tags.service || @@ -62,13 +135,39 @@ function serviceLocator (span) { span.tracer()._service } +/** + * Extracts the resource name from the span context. + * + * @param {DatadogSpan} span + * @returns {string|undefined} + */ function resourceLocator (span) { const { _tags: tags } = span.context() return tags.resource || tags['resource.name'] } +/** + * Configuration options for a sampling rule. + * + * @typedef {object} SamplingRuleConfig + * @property {string|RegExp} [name] - Match on span operation name. 
+ * @property {string|RegExp} [service] - Match on service name. + * @property {string|RegExp} [resource] - Match on resource name. + * @property {Record} [tags] - Match on specific tag values by key. + * @property {number} [sampleRate=1] - Deterministic sampling rate in [0, 1]. + * @property {string} [provenance] - Optional provenance/metadata for this rule. + * @property {number} [maxPerSecond] - Maximum samples per second (rate limit). + */ + +/** + * SamplingRule encapsulates matching criteria and sampling/limiting behavior + * to decide whether a span should be sampled. + */ class SamplingRule { + /** + * @param {SamplingRuleConfig} [config] + */ constructor ({ name, service, resource, tags, sampleRate = 1, provenance, maxPerSecond } = {}) { this.matchers = [] @@ -94,22 +193,45 @@ class SamplingRule { } } + /** + * Constructs a SamplingRule from the given configuration. + * @param {SamplingRuleConfig} config + * @returns {SamplingRule} + */ static from (config) { return new SamplingRule(config) } + /** + * Deterministic sampling rate in [0, 1]. + * @returns {number} + */ get sampleRate () { return this._sampler.rate() } + /** + * Effective rate applied by the rate limiter, if configured. + * @returns {number|undefined} + */ get effectiveRate () { return this._limiter && this._limiter.effectiveRate() } + /** + * Maximum samples per second if a limiter is present. + * @returns {number|undefined} + */ get maxPerSecond () { return this._limiter && this._limiter._rateLimit } + /** + * Checks whether the provided span matches all configured criteria. + * + * @param {DatadogSpan} span + * @returns {boolean} + */ match (span) { for (const matcher of this.matchers) { // Rule is a special object with a .match() property. @@ -126,7 +248,7 @@ class SamplingRule { /** * Determines whether a span should be sampled based on the configured sampling rule. * - * @param {Span|SpanContext} span - The span or span context to evaluate. 
+ * @param {DatadogSpan|DatadogSpanContext} span - The span or span context to evaluate. * @returns {boolean} `true` if the span should be sampled, otherwise `false`. */ sample (span) { diff --git a/packages/dd-trace/src/span_sampler.js b/packages/dd-trace/src/span_sampler.js index d39ceac6b5d..9810b536787 100644 --- a/packages/dd-trace/src/span_sampler.js +++ b/packages/dd-trace/src/span_sampler.js @@ -3,11 +3,23 @@ const { USER_KEEP, AUTO_KEEP } = require('../../../ext').priority const SamplingRule = require('./sampling_rule') +/** + * Samples individual spans within a trace using span-level rules. + */ class SpanSampler { + /** + * @param {{ spanSamplingRules?: Array|Array> }} [config] + */ constructor ({ spanSamplingRules = [] } = {}) { this._rules = spanSamplingRules.map(SamplingRule.from) } + /** + * Finds the first matching span sampling rule for the given span. + * + * @param {import('./opentracing/span')} context + * @returns {import('./sampling_rule')|undefined} + */ findRule (context) { for (const rule of this._rules) { // Rule is a special object with a .match() property. @@ -19,6 +31,13 @@ class SpanSampler { } } + /** + * Applies span sampling to spans in the trace, tagging matching spans with + * span sampling metadata when appropriate. + * + * @param {import('./opentracing/span_context')} spanContext + * @returns {void} + */ sample (spanContext) { const decision = spanContext._sampling.priority if (decision === USER_KEEP || decision === AUTO_KEEP) return diff --git a/packages/dd-trace/src/standalone/product.js b/packages/dd-trace/src/standalone/product.js index ff76a6234da..e78430a096b 100644 --- a/packages/dd-trace/src/standalone/product.js +++ b/packages/dd-trace/src/standalone/product.js @@ -3,6 +3,12 @@ const { SAMPLING_MECHANISM_APPSEC } = require('../constants') const RateLimiter = require('../rate_limiter') +/** + * Returns a rate limiter tuned for the provided product configuration. 
+ * + * @param {{ appsec?: { enabled?: boolean }, iast?: { enabled?: boolean } } | undefined} config + * @returns {import('../rate_limiter')} + */ function getProductRateLimiter (config) { if (config?.appsec?.enabled || config?.iast?.enabled) { return new RateLimiter(1, 'minute') // onePerMinute @@ -11,6 +17,9 @@ function getProductRateLimiter (config) { return new RateLimiter(0) // dropAll } +/** + * Available products and their identifiers/mechanisms. + */ const PRODUCTS = { APM: { id: 1 << 0 }, ASM: { id: 1 << 1, mechanism: SAMPLING_MECHANISM_APPSEC }, diff --git a/packages/dd-trace/src/standalone/tracesource.js b/packages/dd-trace/src/standalone/tracesource.js index 67704436df7..06e4db6061f 100644 --- a/packages/dd-trace/src/standalone/tracesource.js +++ b/packages/dd-trace/src/standalone/tracesource.js @@ -2,15 +2,30 @@ const { TRACE_SOURCE_PROPAGATION_KEY } = require('../constants') +/** + * Adds or updates the trace source propagation tag with the given product bit. + * + * @param {Record|undefined} tags + * @param {{ id: number }|undefined} product + * @returns {Record|undefined} + */ function addTraceSourceTag (tags, product) { if (tags && product) { - const actual = tags[TRACE_SOURCE_PROPAGATION_KEY] ? Number.parseInt(tags[TRACE_SOURCE_PROPAGATION_KEY], 16) : 0 + const actual = tags[TRACE_SOURCE_PROPAGATION_KEY] + ? Number.parseInt(String(tags[TRACE_SOURCE_PROPAGATION_KEY]), 16) + : 0 tags[TRACE_SOURCE_PROPAGATION_KEY] = ((actual | product.id) >>> 0).toString(16).padStart(2, '0') } return tags } +/** + * Returns true when the trace source propagation tag exists on the given tags object. 
+ * + * @param {Record} tags + * @returns {boolean} + */ function hasTraceSourcePropagationTag (tags) { return Object.hasOwn(tags, TRACE_SOURCE_PROPAGATION_KEY) } diff --git a/packages/dd-trace/src/standalone/tracesource_priority_sampler.js b/packages/dd-trace/src/standalone/tracesource_priority_sampler.js index 33963dbf4e0..7d822e5630b 100644 --- a/packages/dd-trace/src/standalone/tracesource_priority_sampler.js +++ b/packages/dd-trace/src/standalone/tracesource_priority_sampler.js @@ -8,12 +8,19 @@ const { addTraceSourceTag, hasTraceSourcePropagationTag } = require('./tracesour const { getProductRateLimiter } = require('./product') class TraceSourcePrioritySampler extends PrioritySampler { + /** + * @override + */ configure (env, sampler, config) { // rules not supported this._env = env this._limiter = getProductRateLimiter(config) } + /** + * @override + * @returns {import('../priority_sampler').SamplingPriority|undefined} + */ _getPriorityFromTags (tags, context) { if (Object.hasOwn(tags, MANUAL_KEEP) && tags[MANUAL_KEEP] !== false && @@ -23,6 +30,9 @@ class TraceSourcePrioritySampler extends PrioritySampler { } } + /** + * @override + */ _getPriorityFromAuto (span) { const context = this._getContext(span) @@ -35,6 +45,9 @@ class TraceSourcePrioritySampler extends PrioritySampler { return this._isSampledByRateLimit(context) ? 
AUTO_KEEP : AUTO_REJECT } + /** + * @override + */ setPriority (span, samplingPriority, product) { super.setPriority(span, samplingPriority, product) diff --git a/packages/dd-trace/src/startup-log.js b/packages/dd-trace/src/startup-log.js index de3743a304b..25c2f24cb0f 100644 --- a/packages/dd-trace/src/startup-log.js +++ b/packages/dd-trace/src/startup-log.js @@ -13,12 +13,18 @@ let pluginManager let samplingRules = [] let alreadyRan = false +/** + * @returns {Record} + */ function getIntegrationsAndAnalytics () { return { integrations_loaded: Object.keys(pluginManager._pluginsByName) } } +/** + * @param {{ agentError: { code: string, message: string } }} [options] + */ function startupLog ({ agentError } = {}) { if (!config || !pluginManager) { return @@ -34,7 +40,7 @@ function startupLog ({ agentError } = {}) { return } - const out = tracerInfo({ agentError }) + const out = tracerInfo() if (agentError) { out.agent_error = agentError.message @@ -50,6 +56,9 @@ function startupLog ({ agentError } = {}) { } } +/** + * @returns {Record} + */ function tracerInfo () { const url = config.url || `http://${config.hostname || defaults.hostname}:${config.port}` @@ -94,14 +103,23 @@ function tracerInfo () { return out } +/** + * @param {import('./config')} aConfig + */ function setStartupLogConfig (aConfig) { config = aConfig } +/** + * @param {import('./plugin_manager')} thePluginManager + */ function setStartupLogPluginManager (thePluginManager) { pluginManager = thePluginManager } +/** + * @param {import('./sampling_rule')} theRules + */ function setSamplingRules (theRules) { samplingRules = theRules } diff --git a/packages/dd-trace/test/appsec/iast/analyzers/ldap-injection-analyzer.ldapjs.plugin.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/ldap-injection-analyzer.ldapjs.plugin.spec.js index da5664fe3a8..fa792a64487 100644 --- a/packages/dd-trace/test/appsec/iast/analyzers/ldap-injection-analyzer.ldapjs.plugin.spec.js +++ 
b/packages/dd-trace/test/appsec/iast/analyzers/ldap-injection-analyzer.ldapjs.plugin.spec.js @@ -27,7 +27,7 @@ describe('ldap-injection-analyzer with ldapjs', () => { client = ldapjs.createClient({ url: 'ldap://localhost:1389' }) - return new Promise((resolve, reject) => { + return /** @type {Promise} */ (new Promise((resolve, reject) => { client.bind(`cn=admin,${base}`, 'adminpassword', (err) => { if (err) { reject(err) @@ -35,7 +35,7 @@ describe('ldap-injection-analyzer with ldapjs', () => { resolve() } }) - }) + })) }) afterEach((done) => { @@ -81,7 +81,7 @@ describe('ldap-injection-analyzer with ldapjs', () => { describe('context is not null after search end event', () => { testThatRequestHasVulnerability(() => { - return new Promise((resolve, reject) => { + return /** @type {Promise} */ (new Promise((resolve, reject) => { const store = storage('legacy').getStore() const iastCtx = iastContextFunctions.getIastContext(store) @@ -100,13 +100,13 @@ describe('ldap-injection-analyzer with ldapjs', () => { resolve() }).on('error', reject) }) - }) + })) }, 'LDAP_INJECTION') }) describe('remove listener should work as expected', () => { testThatRequestHasVulnerability(() => { - return new Promise((resolve, reject) => { + return /** @type {Promise} */ (new Promise((resolve, reject) => { const store = storage('legacy').getStore() const iastCtx = iastContextFunctions.getIastContext(store) @@ -131,13 +131,13 @@ describe('ldap-injection-analyzer with ldapjs', () => { searchRes.on('end', onSearchEnd) }) - }) + })) }, 'LDAP_INJECTION') }) describe('search inside bind should detect the vulnerability and not lose the context', () => { testThatRequestHasVulnerability(() => { - return new Promise((resolve, reject) => { + return /** @type {Promise} */ (new Promise((resolve, reject) => { client.bind(`cn=admin,${base}`, 'adminpassword', (err) => { if (err) { reject(err) @@ -162,7 +162,7 @@ describe('ldap-injection-analyzer with ldapjs', () => { }) } }) - }) + })) }, 'LDAP_INJECTION') 
}) }) diff --git a/packages/dd-trace/test/appsec/index.express.plugin.spec.js b/packages/dd-trace/test/appsec/index.express.plugin.spec.js index dd7656f2eb5..84d40e1b72e 100644 --- a/packages/dd-trace/test/appsec/index.express.plugin.spec.js +++ b/packages/dd-trace/test/appsec/index.express.plugin.spec.js @@ -1,9 +1,11 @@ 'use strict' -const semver = require('semver') const Axios = require('axios') -const { assert } = require('chai') +const semver = require('semver') const sinon = require('sinon') +const { describe, it, before, beforeEach, afterEach, after } = require('mocha') + +const assert = require('node:assert/strict') const path = require('node:path') const zlib = require('node:zlib') @@ -21,7 +23,9 @@ withVersions('express', 'express', version => { } describe('Suspicious request blocking - path parameters', () => { - let server, paramCallbackSpy, axios + let axios /** @type {AxiosInstance} */ + let server + let paramCallbackSpy /** @type {SinonSpy} */ before(() => { return agent.load(['express', 'http'], { client: false }) @@ -326,8 +330,8 @@ withVersions('express', 'express', version => { await agent.assertSomeTraces((traces) => { const span = traces[0][0] - assert.property(span.meta, '_dd.appsec.s.req.body') - assert.notProperty(span.meta, '_dd.appsec.s.res.body') + assert.ok(Object.hasOwn(span.meta, '_dd.appsec.s.req.body')) + assert.ok(!Object.hasOwn(span.meta, '_dd.appsec.s.res.body')) assert.equal(span.meta['_dd.appsec.s.req.body'], expectedRequestBodySchema) }) @@ -384,8 +388,8 @@ withVersions('express', 'express', version => { await agent.assertSomeTraces((traces) => { const span = traces[0][0] - assert.notProperty(span.meta, '_dd.appsec.s.req.body') - assert.notProperty(span.meta, '_dd.appsec.s.res.body') + assert(!Object.hasOwn(span.meta, '_dd.appsec.s.req.body')) + assert(!Object.hasOwn(span.meta, '_dd.appsec.s.res.body')) }) assert.equal(res.status, 200) diff --git a/packages/dd-trace/test/appsec/rasp/command_injection.express.plugin.spec.js 
b/packages/dd-trace/test/appsec/rasp/command_injection.express.plugin.spec.js index ad0816221cd..e2435fafb73 100644 --- a/packages/dd-trace/test/appsec/rasp/command_injection.express.plugin.spec.js +++ b/packages/dd-trace/test/appsec/rasp/command_injection.express.plugin.spec.js @@ -1,13 +1,16 @@ 'use strict' +const Axios = require('axios') +const assert = require('node:assert/strict') +const { describe, it, before, beforeEach, after } = require('mocha') + +const path = require('node:path') + const agent = require('../../plugins/agent') const appsec = require('../../../src/appsec') const Config = require('../../../src/config') const { withVersions } = require('../../setup/mocha') -const path = require('path') -const Axios = require('axios') const { checkRaspExecutedAndHasThreat, checkRaspExecutedAndNotThreat } = require('./utils') -const { assert } = require('chai') describe('RASP - command_injection', () => { withVersions('express', 'express', expressVersion => { diff --git a/packages/dd-trace/test/appsec/rasp/command_injection.integration.spec.js b/packages/dd-trace/test/appsec/rasp/command_injection.integration.spec.js index 74e235c9c08..68d85cd6209 100644 --- a/packages/dd-trace/test/appsec/rasp/command_injection.integration.spec.js +++ b/packages/dd-trace/test/appsec/rasp/command_injection.integration.spec.js @@ -1,9 +1,11 @@ 'use strict' - -const { createSandbox, FakeAgent, spawnProc } = require('../../../../../integration-tests/helpers') -const path = require('path') const Axios = require('axios') const { assert } = require('chai') +const { describe, it, before, beforeEach, afterEach, after } = require('mocha') + +const path = require('node:path') + +const { createSandbox, FakeAgent, spawnProc } = require('../../../../../integration-tests/helpers') describe('RASP - command_injection - integration', () => { let axios, sandbox, cwd, appFile, agent, proc @@ -35,7 +37,7 @@ describe('RASP - command_injection - integration', () => { DD_TRACE_DEBUG: 'true', 
DD_APPSEC_ENABLED: 'true', DD_APPSEC_RASP_ENABLED: 'true', - DD_TELEMETRY_HEARTBEAT_INTERVAL: 1, + DD_TELEMETRY_HEARTBEAT_INTERVAL: '1', DD_APPSEC_RULES: path.join(cwd, 'resources', 'rasp_rules.json') } }) diff --git a/packages/dd-trace/test/config.spec.js b/packages/dd-trace/test/config.spec.js index 4e85c3deba3..0c2ff5a5a8c 100644 --- a/packages/dd-trace/test/config.spec.js +++ b/packages/dd-trace/test/config.spec.js @@ -563,7 +563,7 @@ describe('Config', () => { process.env.DD_APPSEC_RASP_COLLECT_REQUEST_BODY = 'true' process.env.DD_APPSEC_RASP_ENABLED = 'false' process.env.DD_APPSEC_RULES = RULES_JSON_PATH - process.env.DD_APPSEC_SCA_ENABLED = true + process.env.DD_APPSEC_SCA_ENABLED = 'true' process.env.DD_APPSEC_STACK_TRACE_ENABLED = 'false' process.env.DD_APPSEC_TRACE_RATE_LIMIT = '42' process.env.DD_APPSEC_WAF_TIMEOUT = '42' @@ -583,12 +583,12 @@ describe('Config', () => { process.env.DD_HEAP_SNAPSHOT_COUNT = '1' process.env.DD_HEAP_SNAPSHOT_DESTINATION = '/tmp' process.env.DD_HEAP_SNAPSHOT_INTERVAL = '1800' - process.env.DD_IAST_DB_ROWS_TO_TAINT = 2 - process.env.DD_IAST_DEDUPLICATION_ENABLED = false + process.env.DD_IAST_DB_ROWS_TO_TAINT = '2' + process.env.DD_IAST_DEDUPLICATION_ENABLED = 'false' process.env.DD_IAST_ENABLED = 'true' process.env.DD_IAST_MAX_CONCURRENT_REQUESTS = '3' process.env.DD_IAST_MAX_CONTEXT_OPERATIONS = '4' - process.env.DD_IAST_REDACTION_ENABLED = false + process.env.DD_IAST_REDACTION_ENABLED = 'false' process.env.DD_IAST_REDACTION_NAME_PATTERN = 'REDACTION_NAME_PATTERN' process.env.DD_IAST_REDACTION_VALUE_PATTERN = 'REDACTION_VALUE_PATTERN' process.env.DD_IAST_REQUEST_SAMPLING = '40' @@ -601,8 +601,8 @@ describe('Config', () => { process.env.DD_INSTRUMENTATION_INSTALL_ID = '68e75c48-57ca-4a12-adfc-575c4b05fcbe' process.env.DD_INSTRUMENTATION_INSTALL_TIME = '1703188212' process.env.DD_INSTRUMENTATION_INSTALL_TYPE = 'k8s_single_step' - process.env.DD_LANGCHAIN_SPAN_CHAR_LIMIT = 50 - 
process.env.DD_LANGCHAIN_SPAN_PROMPT_COMPLETION_SAMPLE_RATE = 0.5 + process.env.DD_LANGCHAIN_SPAN_CHAR_LIMIT = '50' + process.env.DD_LANGCHAIN_SPAN_PROMPT_COMPLETION_SAMPLE_RATE = '0.5' process.env.DD_LLMOBS_AGENTLESS_ENABLED = 'true' process.env.DD_LLMOBS_ML_APP = 'myMlApp' process.env.DD_PROFILING_ENABLED = 'true' @@ -653,8 +653,8 @@ describe('Config', () => { process.env.DD_TRACE_SPAN_ATTRIBUTE_SCHEMA = 'v1' process.env.DD_TRACING_ENABLED = 'false' process.env.DD_VERSION = '1.0.0' - process.env.DD_VERTEXAI_SPAN_CHAR_LIMIT = 50 - process.env.DD_VERTEXAI_SPAN_PROMPT_COMPLETION_SAMPLE_RATE = 0.5 + process.env.DD_VERTEXAI_SPAN_CHAR_LIMIT = '50' + process.env.DD_VERTEXAI_SPAN_PROMPT_COMPLETION_SAMPLE_RATE = '0.5' // required if we want to check updates to config.debug and config.logLevel which is fetched from logger reloadLoggerAndConfig() @@ -1413,8 +1413,8 @@ describe('Config', () => { process.env.DD_APPSEC_RASP_ENABLED = 'true' process.env.DD_APPSEC_RULES = RECOMMENDED_JSON_PATH process.env.DD_APPSEC_STACK_TRACE_ENABLED = 'true' - process.env.DD_APPSEC_TRACE_RATE_LIMIT = 11 - process.env.DD_APPSEC_WAF_TIMEOUT = 11 + process.env.DD_APPSEC_TRACE_RATE_LIMIT = '11' + process.env.DD_APPSEC_WAF_TIMEOUT = '11' process.env.DD_CODE_ORIGIN_FOR_SPANS_ENABLED = 'false' process.env.DD_CODE_ORIGIN_FOR_SPANS_EXPERIMENTAL_EXIT_SPANS_ENABLED = 'true' process.env.DD_DOGSTATSD_PORT = '5218' @@ -1432,7 +1432,7 @@ describe('Config', () => { process.env.DD_IAST_STACK_TRACE_ENABLED = 'true' process.env.DD_LLMOBS_AGENTLESS_ENABLED = 'true' process.env.DD_LLMOBS_ML_APP = 'myMlApp' - process.env.DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS = 11 + process.env.DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS = '11' process.env.DD_RUNTIME_METRICS_ENABLED = 'true' process.env.DD_RUNTIME_METRICS_RUNTIME_ID_ENABLED = 'true' process.env.DD_SERVICE = 'service' @@ -1451,7 +1451,7 @@ describe('Config', () => { process.env.DD_TRACE_EXPERIMENTAL_GET_RUM_DATA_ENABLED = 'true' process.env.DD_TRACE_GLOBAL_TAGS = 
'foo:bar,baz:qux' process.env.DD_TRACE_MIDDLEWARE_TRACING_ENABLED = 'false' - process.env.DD_TRACE_PARTIAL_FLUSH_MIN_SPANS = 2000 + process.env.DD_TRACE_PARTIAL_FLUSH_MIN_SPANS = '2000' process.env.DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED = 'false' process.env.DD_TRACE_PEER_SERVICE_MAPPING = 'c:cc' process.env.DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT = 'restart' @@ -2221,7 +2221,7 @@ describe('Config', () => { }) it('should not be used when DD_TRACE_AGENT_PORT provided', () => { - process.env.DD_TRACE_AGENT_PORT = 12345 + process.env.DD_TRACE_AGENT_PORT = '12345' const config = new Config() @@ -2527,7 +2527,7 @@ describe('Config', () => { context('payload tagging', () => { let env - const staticConfig = require('../src/payload-tagging/config/aws') + const staticConfig = require('../src/payload-tagging/config/aws.json') beforeEach(() => { env = process.env @@ -2595,7 +2595,7 @@ describe('Config', () => { it('overriding max depth', () => { process.env.DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING = 'all' process.env.DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING = 'all' - process.env.DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH = 7 + process.env.DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH = '7' const taggingConfig = new Config().cloudPayloadTagging expect(taggingConfig).to.have.property('requestsEnabled', true) expect(taggingConfig).to.have.property('responsesEnabled', true) diff --git a/packages/dd-trace/test/debugger/devtools_client/breakpoints.spec.js b/packages/dd-trace/test/debugger/devtools_client/breakpoints.spec.js index 7b1e0b4d55f..f3c035c55ef 100644 --- a/packages/dd-trace/test/debugger/devtools_client/breakpoints.spec.js +++ b/packages/dd-trace/test/debugger/devtools_client/breakpoints.spec.js @@ -457,6 +457,16 @@ describe('breakpoints', function () { } }) +/** + * Generate a probe config + * + * @param {object} [config] Optional configuration object. + * @param {string} [config.id='probe-1'] The probe ID. + * @param {number} [config.version=1] The probe version. 
+ * @param {object} [config.where={ sourceFile: 'test.js', lines: ['10'] }] The location information. + * @param {object} [config.when={ json: { eq: [{ ref: 'foo' }, 42] }, dsl: 'foo = 42' }] The condition for the probe. + * @returns {{ id: string; version: number; where: object; when: object; }} + */ function genProbeConfig ({ id, version, where, when } = {}) { return { id: id || 'probe-1', diff --git a/packages/dd-trace/test/debugger/devtools_client/condition-test-cases.js b/packages/dd-trace/test/debugger/devtools_client/condition-test-cases.js index 1df7dbfc00c..af428742b6b 100644 --- a/packages/dd-trace/test/debugger/devtools_client/condition-test-cases.js +++ b/packages/dd-trace/test/debugger/devtools_client/condition-test-cases.js @@ -1,5 +1,78 @@ 'use strict' +/** + * AST node types and test case shapes for devtools condition expressions. + * These typedefs intentionally avoid the `any` type and aim to be as precise as practical. + * + * @typedef {{ ref: string }} RefExpression + * @typedef {{ getmember: [Expression, string|RefExpression] }} GetMemberExpression + * @typedef {{ index: [Expression, number|string|RefExpression] }} IndexExpression + * @typedef {{ len: Expression }} LenExpression + * @typedef {{ count: Expression }} CountExpression + * @typedef {{ isEmpty: Expression }} IsEmptyExpression + * @typedef {{ eq: [Expression, Expression] }} EqExpression + * @typedef {{ ne: [Expression, Expression] }} NeExpression + * @typedef {{ gt: [Expression, Expression] }} GtExpression + * @typedef {{ ge: [Expression, Expression] }} GeExpression + * @typedef {{ lt: [Expression, Expression] }} LtExpression + * @typedef {{ le: [Expression, Expression] }} LeExpression + * @typedef {{ substring: [Expression, number, (number|undefined)?] 
}} SubstringExpression + * @typedef {{ startsWith: [Expression, string|RefExpression] }} StartsWithExpression + * @typedef {{ endsWith: [Expression, string|RefExpression] }} EndsWithExpression + * @typedef {{ any: [Expression, Expression] }} AnyExpression + * @typedef {{ all: [Expression, Expression] }} AllExpression + * @typedef {{ or: [Expression, Expression] }} OrExpression + * @typedef {{ and: [Expression, Expression] }} AndExpression + * @typedef {{ filter: [Expression, Expression] }} FilterExpression + * @typedef {{ not: Expression }} NotExpression + * @typedef {{ contains: [Expression, Expression] }} ContainsExpression + * @typedef {{ matches: [Expression, string|Expression] }} MatchesExpression + * @typedef {{ instanceof: [Expression, string] }} InstanceofExpression + * @typedef {{ isDefined: Expression }} IsDefinedExpression + * + * @typedef {null|boolean|number|string|bigint} Literal + * + * @typedef {Literal| + * RefExpression| + * GetMemberExpression| + * IndexExpression| + * LenExpression| + * CountExpression| + * IsEmptyExpression| + * EqExpression| + * NeExpression| + * GtExpression| + * GeExpression| + * LtExpression| + * LeExpression| + * SubstringExpression| + * StartsWithExpression| + * EndsWithExpression| + * AnyExpression| + * AllExpression| + * OrExpression| + * AndExpression| + * FilterExpression| + * NotExpression| + * ContainsExpression| + * MatchesExpression| + * InstanceofExpression| + * IsDefinedExpression} Expression + * + * @typedef {Object.<string, unknown>} VariableBindings + * + * @typedef {[Expression, VariableBindings, unknown]} TestCaseTuple + * @typedef {{ + * ast: Expression, + * vars?: VariableBindings, + * expected?: unknown, + * execute?: boolean, + * before?: () => void, + * suffix?: string + * }} TestCaseObject + * @typedef {TestCaseTuple|TestCaseObject} TestCase + */ + class CustomObject {} class HasInstanceSideEffect { static [Symbol.hasInstance] () { throw new Error('This should never throw!') } @@ -9,9 +82,15 @@ const
objectWithToPrimitiveSymbol = Object.create(Object.prototype, { [Symbol.toPrimitive]: { value: () => { throw new Error('This should never throw!') } } }) class EvilRegex extends RegExp { - exec () { throw new Error('This should never throw!') } + /** + * @override + * @param {string} string + * @returns {RegExpExecArray | null} + */ + exec (string) { throw new Error('This should never throw!') } } +/** @type {TestCase[]} */ const literals = [ [null, {}, null], [42, {}, 42], @@ -19,6 +98,7 @@ const literals = [ ['foo', {}, 'foo'] ] +/** @type {TestCase[]} */ const references = [ [{ ref: 'foo' }, { foo: 42 }, 42], [{ ref: 'foo' }, {}, new ReferenceError('foo is not defined')], @@ -97,6 +177,7 @@ const references = [ } ] +/** @type {TestCase[]} */ const propertyAccess = [ [{ getmember: [{ ref: 'obj' }, 'foo'] }, { obj: { foo: 'test-me' } }, 'test-me'], [ @@ -175,6 +256,7 @@ const propertyAccess = [ ] ] +/** @type {TestCase[]} */ const sizes = [ [{ len: { ref: 'str' } }, { str: 'hello' }, 5], [{ len: { ref: 'str' } }, { str: String('hello') }, 5], @@ -242,6 +324,7 @@ const sizes = [ ] ] +/** @type {TestCase[]} */ const equality = [ [{ eq: [{ ref: 'str' }, 'foo'] }, { str: 'foo' }, true], [{ eq: [{ ref: 'str' }, 'foo'] }, { str: 'bar' }, false], @@ -396,6 +479,7 @@ const equality = [ ] ] +/** @type {TestCase[]} */ const stringManipulation = [ [{ substring: [{ ref: 'str' }, 4, 7] }, { str: 'hello world' }, 'hello world'.substring(4, 7)], [{ substring: [{ ref: 'str' }, 4] }, { str: 'hello world' }, 'hello world'.substring(4)], @@ -418,6 +502,7 @@ const stringManipulation = [ ] ] +/** @type {TestCase[]} */ const stringComparison = [ [{ startsWith: [{ ref: 'str' }, 'hello'] }, { str: 'hello world!' }, true], [{ startsWith: [{ ref: 'str' }, 'world'] }, { str: 'hello world!' 
}, false], @@ -476,6 +561,7 @@ const stringComparison = [ ] ] +/** @type {TestCase[]} */ const logicalOperators = [ [{ any: [{ ref: 'arr' }, { isEmpty: { ref: '@it' } }] }, { arr: ['foo', 'bar', ''] }, true], [{ any: [{ ref: 'arr' }, { isEmpty: { ref: '@it' } }] }, { arr: ['foo', 'bar', 'baz'] }, false], @@ -498,6 +584,7 @@ const logicalOperators = [ [{ and: [{ ref: 'bar' }, { ref: 'foo' }] }, { bar: 42 }, new ReferenceError('foo is not defined')] ] +/** @type {TestCase[]} */ const collectionOperations = [ [{ filter: [{ ref: 'arr' }, { not: { isEmpty: { ref: '@it' } } }] }, { arr: ['foo', 'bar', ''] }, ['foo', 'bar']], [{ filter: [{ ref: 'tarr' }, { gt: [{ ref: '@it' }, 15] }] }, { tarr: new Int16Array([10, 20, 30]) }, [20, 30]], @@ -518,6 +605,7 @@ const collectionOperations = [ ] ] +/** @type {TestCase[]} */ const membershipAndMatching = [ [{ contains: [{ ref: 'str' }, 'world'] }, { str: 'hello world!' }, true], [{ contains: [{ ref: 'str' }, 'missing'] }, { str: 'hello world!' }, false], @@ -625,6 +713,7 @@ const membershipAndMatching = [ ] ] +/** @type {TestCase[]} */ const typeAndDefinitionChecks = [ // Primitive types [{ instanceof: [{ ref: 'foo' }, 'string'] }, { foo: 'foo' }, true], @@ -668,6 +757,14 @@ const typeAndDefinitionChecks = [ { ast: { isDefined: { ref: 'foo' } }, suffix: '', expected: false } ] +/** + * Define a getter on the provided object that throws on access. + * + * @template T extends object + * @param {T} obj + * @param {string} propName + * @returns {T} + */ function overloadPropertyWithGetter (obj, propName) { Object.defineProperty(obj, propName, { get () { throw new Error('This should never throw!') } @@ -675,11 +772,57 @@ function overloadPropertyWithGetter (obj, propName) { return obj } +/** + * Overwrite a method/property on the object with a throwing function. 
+ * + * @template T extends object + * @param {T} obj + * @param {PropertyKey} methodName + * @returns {T} + */ function overloadMethod (obj, methodName) { obj[methodName] = () => { throw new Error('This should never throw!') } return obj } +/** + * Create a subclass of the given built-in where the given property/method is overloaded + * in the prototype chain to throw, and return a further subclass constructor. + * + * @overload + * @param {StringConstructor} Builtin + * @param {PropertyKey} propName + * @returns {StringConstructor} + * + * @overload + * @param {ArrayConstructor} Builtin + * @param {PropertyKey} propName + * @returns {ArrayConstructor} + * + * @overload + * @param {Int16ArrayConstructor} Builtin + * @param {PropertyKey} propName + * @returns {Int16ArrayConstructor} + * + * @overload + * @param {Int32ArrayConstructor} Builtin + * @param {PropertyKey} propName + * @returns {Int32ArrayConstructor} + * + * @overload + * @param {SetConstructor} Builtin + * @param {PropertyKey} propName + * @returns {SetConstructor} + * + * @overload + * @param {MapConstructor} Builtin + * @param {PropertyKey} propName + * @returns {MapConstructor} + * + * @param {new (...args: unknown[]) => object} Builtin + * @param {PropertyKey} propName + * @returns {new (...args: unknown[]) => object} + */ function createClassWithOverloadedMethodInPrototypeChain (Builtin, propName) { class Klass extends Builtin { [propName] () { throw new Error('This should never throw!') } @@ -690,6 +833,19 @@ function createClassWithOverloadedMethodInPrototypeChain (Builtin, propName) { return SubKlass } +/** @type {{ + * literals: TestCase[], + * references: TestCase[], + * propertyAccess: TestCase[], + * sizes: TestCase[], + * equality: TestCase[], + * stringManipulation: TestCase[], + * stringComparison: TestCase[], + * logicalOperators: TestCase[], + * collectionOperations: TestCase[], + * membershipAndMatching: TestCase[], + * typeAndDefinitionChecks: TestCase[] + * }} */ module.exports = { 
literals, references, diff --git a/packages/dd-trace/test/lambda/index.spec.js b/packages/dd-trace/test/lambda/index.spec.js index e29ab04b76a..8801d313ae9 100644 --- a/packages/dd-trace/test/lambda/index.spec.js +++ b/packages/dd-trace/test/lambda/index.spec.js @@ -35,7 +35,7 @@ const restoreEnv = () => { const loadAgent = ({ exporter = 'agent' } = {}) => { // Make sure the hook is re-registered require('../../src/lambda') - return agent.load(null, [], { + return agent.load([], [], { experimental: { exporter } diff --git a/packages/dd-trace/test/plugins/agent.js b/packages/dd-trace/test/plugins/agent.js index fba8700bfb9..d63816c7c1e 100644 --- a/packages/dd-trace/test/plugins/agent.js +++ b/packages/dd-trace/test/plugins/agent.js @@ -18,7 +18,9 @@ const llmobsHandlers = new Set() let sockets = [] let agent = null let listener = null +/** @type {import('../../src/index') | null} */ let tracer = null +/** @type {string[]} */ let plugins = [] const testedPlugins = [] let dsmStats = [] @@ -42,6 +44,14 @@ function ciVisRequestHandler (request, response) { }) } +/** + * Checks if a DSM stats object exists with a given hash and edge tags. + * + * @param {import('../../src/index')} agent + * @param {string} expectedHash + * @param {string[]} expectedEdgeTags + * @returns {boolean} + */ function dsmStatsExist (agent, expectedHash, expectedEdgeTags) { const dsmStats = agent.getDsmStats() const foundHashes = new Set() @@ -66,6 +76,13 @@ function dsmStatsExist (agent, expectedHash, expectedEdgeTags) { throw new Error(`Hash not found. Expected: ${expectedHash}, Found hashes: ${util.inspect(foundHashes)}`) } +/** + * Checks if a DSM stats object exists with a given parent hash. 
+ * + * @param {import('../../src/index')} agent + * @param {string} expectedParentHash + * @returns {boolean} + */ function dsmStatsExistWithParentHash (agent, expectedParentHash) { const dsmStats = agent.getDsmStats() let hashFound = false @@ -84,6 +101,12 @@ function dsmStatsExistWithParentHash (agent, expectedParentHash) { return hashFound } +/** + * Unformats span events. + * + * @param {import('../../src/opentracing/span')} span + * @returns {import('../../src/opentracing/span')[]} + */ function unformatSpanEvents (span) { if (span.meta && span.meta.events) { // Parse the JSON string back into an object @@ -105,6 +128,11 @@ function unformatSpanEvents (span) { return [] // Return an empty array if no events are found } +/** + * Adds environment variables to headers. + * + * @param {http.IncomingHttpHeaders} headers + */ function addEnvironmentVariablesToHeaders (headers) { // get all environment variables that start with "DD_" const ddEnvVars = new Map( @@ -122,9 +150,10 @@ function addEnvironmentVariablesToHeaders (headers) { // add the DD environment variables to the header if any exist // to send with trace to final agent destination - if (ddEnvVars.length > 0) { - headers['X-Datadog-Trace-Env-Variables'] = ddEnvVars.join(',') - } + // if (ddEnvVars.size > 0) { + // // TODO: Should we still do this? It has never worked until now. + // headers['X-Datadog-Trace-Env-Variables'] = [...ddEnvVars].map(([key, value]) => `${key}=${value}`).join(',') + // } // serialize the DD environment variables into a string of k=v pairs separated by comma const serializedEnvVars = Array.from(ddEnvVars.entries()) @@ -136,6 +165,13 @@ function addEnvironmentVariablesToHeaders (headers) { headers['X-Datadog-Trace-Env-Variables'] = serializedEnvVars } +/** + * Handles the received trace request and sends trace to Test Agent if bool enabled. 
+ * + * @param {express.Request} req + * @param {express.Response} res + * @param {boolean} sendToTestAgent + */ function handleTraceRequest (req, res, sendToTestAgent) { // handles the received trace request and sends trace to Test Agent if bool enabled. if (sendToTestAgent) { @@ -196,11 +232,7 @@ function checkAgentStatus () { return new Promise((resolve) => { const request = http.request(`${agentUrl}/info`, { method: 'GET' }, response => { - if (response.statusCode === 200) { - resolve(true) - } else { - resolve(false) - } + resolve(response.statusCode === 200) }) request.on('error', (_error_) => { @@ -221,26 +253,29 @@ function getCurrentIntegrationName () { const stack = new Error().stack // The regex looks for /packages/datadog-plugin-NAME/test/ in the stack trace const pluginTestRegex = /packages\/datadog-plugin-([^/]+)\/test/ - const match = stack.match(pluginTestRegex) + const match = stack?.match(pluginTestRegex) return match ? match[1] : null } -function assertIntegrationName (args) { +/** + * @param {import('../../src/opentracing/span')[][]} traces + */ +function assertIntegrationName (traces) { // we want to assert that all spans generated by an instrumentation have the right `_dd.integration` tag set if (currentIntegrationName) { - const traces = args[0] - if (traces && Array.isArray(traces)) { + // TODO(BridgeAR): Should we just fail, if we do not receive an array of traces? + if (Array.isArray(traces)) { traces.forEach(trace => { if (Array.isArray(trace)) { trace.forEach(span => { // ignore everything that has no component (i.e. 
manual span) // ignore everything that has already the component == _dd.integration - if (span && span.meta && span.meta.component && span.meta.component !== span.meta['_dd.integration']) { + if (span?.meta?.component && span.meta.component !== span.meta['_dd.integration']) { expect(span.meta['_dd.integration']).to.equal( currentIntegrationName, - `Expected span to have "_dd.integration" tag "${currentIntegrationName}" - but found "${span.meta['_dd.integration']}" for span ID ${span.span_id}` + `Expected span to have "_dd.integration" tag "${currentIntegrationName}" + but found "${span.meta['_dd.integration']}" for span ID ${span.span_id}` ) } }) @@ -253,6 +288,26 @@ function assertIntegrationName (args) { const DEFAULT_AVAILABLE_ENDPOINTS = ['/evp_proxy/v2'] let availableEndpoints = DEFAULT_AVAILABLE_ENDPOINTS +/** + * The options for the runCallbackAgainstTraces function. + * + * If a number is provided, it will be used as the timeoutMs. + * + * Defaults: + * - timeoutMs: 1000 + * - rejectFirst: false + * - spanResourceMatch: undefined + * + * @typedef {Object} RunCallbackAgainstTracesOptions + * @property {number} [timeoutMs=1000] - The timeout in ms. + * @property {boolean} [rejectFirst=false] - If true, reject the first time the callback throws. + * @property {RegExp} [spanResourceMatch] - A regex to match against the span resource. + * @typedef {import('../../src/opentracing/span')} Span + * For a given payload, an array of traces, each trace is an array of spans. + * @typedef {(traces: Span[][]) => void} TracesCallback + * @typedef {(agentlessPayload: {events: Event[]}, request: Request) => void} AgentlessCallback + * @typedef {TracesCallback | AgentlessCallback} RunCallbackAgainstTracesCallback + */ /** * Register a callback with expectations to be run on every tracing or stats payload sent to the agent depending * on the handlers inputted. If the callback does not throw, the returned promise resolves. 
If it does, @@ -260,16 +315,13 @@ let availableEndpoints = DEFAULT_AVAILABLE_ENDPOINTS * (default 1000 ms) and if any of them succeed, the promise will resolve. * Otherwise, it will reject. * - * @param {(traces: Array>) => void} callback - A function that tests a payload as it's received. - * @param {Object} [options] - An options object - * @param {number} [options.timeoutMs=1000] - The timeout in ms. - * @param {boolean} [options.rejectFirst=false] - If true, reject the first time the callback throws. - * @param {Set} [handlers] - Set of handlers to add the callback to. + * @param {RunCallbackAgainstTracesCallback} callback - A function that tests a payload as it's received. + * @param {RunCallbackAgainstTracesOptions} [options={}] - An options object + * @param {Set} handlers - Set of handlers to add the callback to. + * @returns {Promise} A promise resolving if expectations are met */ -function runCallbackAgainstTraces (callback, options, handlers) { +function runCallbackAgainstTraces (callback, options = {}, handlers) { let error - let resolve let reject const promise = new Promise((_resolve, _reject) => { @@ -279,24 +331,27 @@ const rejectionTimeout = setTimeout(() => { if (error) reject(error) - }, options?.timeoutMs || 1000) + }, options.timeoutMs || 1000) const handlerPayload = { handler, - spanResourceMatch: options?.spanResourceMatch + spanResourceMatch: options.spanResourceMatch } - function handler () { + /** + * @type {TracesCallback | AgentlessCallback} + */ + function handler (...args) { // we assert integration name being tagged on all spans (when running integration tests) - assertIntegrationName(arguments) + assertIntegrationName(args[0]) try { - const result = callback.apply(null, arguments) + const result = callback(...args) handlers.delete(handlerPayload) clearTimeout(rejectionTimeout) resolve(result) } catch (e) { - if (/** @type
{RunCallbackAgainstTracesOptions} */ (options).rejectFirst) { clearTimeout(rejectionTimeout) reject(e) } else { @@ -315,12 +370,29 @@ module.exports = { /** * Load the plugin on the tracer with an optional config and start a mock agent. * - * @param {String|Array} pluginName - Name or list of names of plugins to load + * @param {String|String[]} pluginNames - Name or list of names of plugins to load * @param {Record} [config] * @param {Record} [tracerConfig={}] * @returns Promise */ - async load (pluginName, config, tracerConfig = {}) { + /** + * Load the plugin on the tracer with an optional config and start a mock agent. + * + * @overload + * @param {String[]} pluginNames - Name or list of names of plugins to load + * @param {Record[]} config + * @param {Record} [tracerConfig={}] + * @returns Promise + */ + async load (pluginNames, config, tracerConfig = {}) { + if (!Array.isArray(pluginNames)) { + pluginNames = [pluginNames] + } + + if (!Array.isArray(config)) { + config = [config] + } + currentIntegrationName = getCurrentIntegrationName() tracer = require('../..') @@ -384,14 +456,16 @@ module.exports = { const server = this.server = http.createServer(agent) const emit = server.emit - server.emit = function () { + /** @type {(this: server, event: string, ...args: unknown[]) => boolean} */ + const originalEmit = emit + server.emit = function (event, ...args) { storage('legacy').enterWith({ noop: true }) - return emit.apply(this, arguments) + return originalEmit.call(this, event, ...args) } server.on('connection', socket => sockets.push(socket)) - const promise = new Promise((resolve, _reject) => { + const promise = /** @type {Promise} */ (new Promise((resolve, _reject) => { listener = server.listen(0, () => { const port = listener.address().port @@ -405,17 +479,15 @@ module.exports = { tracer.setUrl(`http://127.0.0.1:${port}`) - for (let i = 0, l = pluginName.length; i < l; i++) { - tracer.use(pluginName[i], config[i]) + for (let i = 0, l = pluginNames.length; i 
< l; i++) { + tracer.use(pluginNames[i], config[i]) } resolve() }) - }) + })) - pluginName = [].concat(pluginName) - plugins = pluginName - config = [].concat(config) + plugins = pluginNames server.on('close', () => { tracer = null @@ -426,15 +498,15 @@ module.exports = { return promise }, + /** + * @param {string} pluginName + * @param {Record} [config] + */ reload (pluginName, config) { - pluginName = [].concat(pluginName) - plugins = pluginName - config = [].concat(config) + plugins = [pluginName] dsmStats = [] - for (let i = 0, l = pluginName.length; i < l; i++) { - tracer.use(pluginName[i], config[i]) - } + tracer.use(pluginName, config) }, /** @@ -453,18 +525,11 @@ module.exports = { traceHandlers.delete(handler) }, - /** - * Callback for running test assertions against traces. - * - * @callback testAssertionTracesCallback - * @param {Array.>} traces - For a given payload, an array of traces, each trace is an array of spans. - */ - /** * Callback for running test assertions against a span. * * @callback testAssertionSpanCallback - * @param {span} span - For a given payload, the first span of the first trace. + * @param {Span} span - For a given payload, the first span of the first trace. */ /** @@ -472,10 +537,8 @@ module.exports = { * It calls the callback with a `traces` argument which is an array of traces. * Each of these traces is an array of spans. * - * @param {testAssertionTracesCallback} callback - runs once per agent payload - * @param {Object} [options] - An options object - * @param {number} [options.timeoutMs=1000] - The timeout in ms. - * @param {boolean} [options.rejectFirst=false] - If true, reject the first time the callback throws. 
+ * @param {RunCallbackAgainstTracesCallback} callback - runs once per agent payload + * @param {RunCallbackAgainstTracesOptions} [options] - An options object * @returns Promise */ assertSomeTraces (callback, options) { @@ -485,11 +548,9 @@ module.exports = { /** * Same as assertSomeTraces() but only provides the first span (traces[0][0]) * This callback gets executed once for every payload received by the agent. - + * * @param {testAssertionSpanCallback|Record} callbackOrExpected - runs once per agent payload - * @param {Object} [options] - An options object - * @param {number} [options.timeoutMs=1000] - The timeout in ms. - * @param {boolean} [options.rejectFirst=false] - If true, reject the first time the callback throws. + * @param {RunCallbackAgainstTracesOptions} [options] - An options object * @returns Promise */ assertFirstTraceSpan (callbackOrExpected, options) { @@ -510,6 +571,10 @@ module.exports = { /** * Register a callback with expectations to be run on every stats payload sent to the agent. + * + * @param {RunCallbackAgainstTracesCallback} callback - runs once per agent payload + * @param {RunCallbackAgainstTracesOptions} [options] - An options object + * @returns Promise */ expectPipelineStats (callback, options) { return runCallbackAgainstTraces(callback, options, statsHandlers) @@ -517,8 +582,8 @@ module.exports = { /** * Use a callback handler for LLM Observability traces. - * @param {Function} callback - * @param {Record} options + * @param {RunCallbackAgainstTracesCallback} callback + * @param {RunCallbackAgainstTracesOptions} [options] * @returns */ useLlmobsTraces (callback, options) { @@ -536,6 +601,11 @@ module.exports = { /** * Stop the mock agent, reset all expectations and wipe the require cache. + * + * Defaults: + * - ritmReset: true + * - wipe: false + * * @param {Object} [options] * @param {boolean} [options.ritmReset=true] - Resets the Require In The Middle cache. You probably don't need this. 
* @param {boolean} [options.wipe=false] - Wipes tracer and non-native modules from require cache. You probably don't @@ -570,13 +640,13 @@ module.exports = { tracer.llmobs.disable() - return new Promise((resolve, reject) => { + return /** @type {Promise} */ (new Promise((resolve, reject) => { this.server.on('close', () => { this.server = null resolve() }) - }) + })) }, setAvailableEndpoints (newEndpoints) { @@ -595,8 +665,7 @@ module.exports = { .map(exception => new RegExp(exception)) Object.keys(require.cache) - .filter(name => name.indexOf(basedir) !== -1) - .filter(name => !exceptions.some(exception => exception.test(name))) + .filter(name => name.includes(basedir) && !exceptions.some(exception => exception.test(name))) .forEach(name => { delete require.cache[name] }) diff --git a/packages/dd-trace/test/plugins/helpers.js b/packages/dd-trace/test/plugins/helpers.js index 2dea5062d26..c0298626f17 100644 --- a/packages/dd-trace/test/plugins/helpers.js +++ b/packages/dd-trace/test/plugins/helpers.js @@ -117,7 +117,7 @@ function unbreakThen (promise) { } function getNextLineNumber () { - return Number(new Error().stack.split('\n')[2].match(/:(\d+):/)[1]) + 1 + return Number(new Error().stack?.split('\n')[2].match(/:(\d+):/)?.[1]) + 1 } module.exports = { diff --git a/packages/dd-trace/test/plugins/suite.js b/packages/dd-trace/test/plugins/suite.js index a48266cf629..136be7f763b 100644 --- a/packages/dd-trace/test/plugins/suite.js +++ b/packages/dd-trace/test/plugins/suite.js @@ -20,9 +20,9 @@ const ddTraceInit = path.resolve(__dirname, '../../../../init') function exec (cmd, opts = {}) { const date = new Date() const time = [ - String(date.getHours()).padStart(2, 0), - String(date.getMinutes()).padStart(2, 0), - String(date.getSeconds()).padStart(2, 0) + String(date.getHours()).padStart(2, '0'), + String(date.getMinutes()).padStart(2, '0'), + String(date.getSeconds()).padStart(2, '0') ].join(':') console.log(time, '❯', cmd) return new Promise((resolve, reject) => { 
@@ -184,7 +184,7 @@ module.exports = async function runWithOptions (options) { } if (require.main === module) { - const { PLUGINS } = process.env + const { PLUGINS = '' } = process.env const plugins = PLUGINS.split('|') ;(async () => { for (const plugin of plugins) { diff --git a/packages/dd-trace/test/plugins/util/test.spec.js b/packages/dd-trace/test/plugins/util/test.spec.js index bf3d36f8ede..23c3934547e 100644 --- a/packages/dd-trace/test/plugins/util/test.spec.js +++ b/packages/dd-trace/test/plugins/util/test.spec.js @@ -286,7 +286,9 @@ describe('parseAnnotations', () => { it('does not crash with invalid arguments', () => { const tags = parseAnnotations([ + // @ts-expect-error: intentionally passing invalid types to test robustness {}, + // @ts-expect-error: intentionally passing invalid types to test robustness 'invalid', { type: 'DD_TAGS', description: 'yeah' }, { type: 'DD_TAGS[v', description: 'invalid' }, diff --git a/packages/dd-trace/test/profiling/config.spec.js b/packages/dd-trace/test/profiling/config.spec.js index 97570377269..13758a5c3fa 100644 --- a/packages/dd-trace/test/profiling/config.spec.js +++ b/packages/dd-trace/test/profiling/config.spec.js @@ -88,7 +88,7 @@ describe('config', () => { expect(config.exporters[0]._url.toString()).to.equal(options.url) expect(config.exporters[1]).to.be.an.instanceof(FileExporter) expect(config.profilers).to.be.an('array') - expect(config.profilers.length).to.equal(2 + samplingContextsAvailable) + expect(config.profilers.length).to.equal(2 + (samplingContextsAvailable ? 
1 : 0)) expect(config.profilers[0]).to.be.an.instanceOf(SpaceProfiler) expect(config.profilers[1]).to.be.an.instanceOf(WallProfiler) expect(config.profilers[1].codeHotspotsEnabled()).false @@ -150,7 +150,7 @@ describe('config', () => { const config = new Config(options) expect(config.profilers).to.be.an('array') - expect(config.profilers.length).to.equal(1 + samplingContextsAvailable) + expect(config.profilers.length).to.equal(1 + (samplingContextsAvailable ? 1 : 0)) expect(config.profilers[0]).to.be.an.instanceOf(WallProfiler) expect(config.profilers[0].codeHotspotsEnabled()).to.equal(samplingContextsAvailable) if (samplingContextsAvailable) { @@ -213,7 +213,7 @@ describe('config', () => { const config = new Config(options) expect(config.profilers).to.be.an('array') - expect(config.profilers.length).to.equal(1 + samplingContextsAvailable) + expect(config.profilers.length).to.equal(1 + (samplingContextsAvailable ? 1 : 0)) expect(config.profilers[0]).to.be.an.instanceOf(WallProfiler) if (samplingContextsAvailable) { expect(config.profilers[1]).to.be.an.instanceOf(EventsProfiler) diff --git a/packages/dd-trace/test/profiling/exporters/agent.spec.js b/packages/dd-trace/test/profiling/exporters/agent.spec.js index 791ed29de15..9791ddf8b05 100644 --- a/packages/dd-trace/test/profiling/exporters/agent.spec.js +++ b/packages/dd-trace/test/profiling/exporters/agent.spec.js @@ -203,7 +203,7 @@ describe('exporters/agent', function () { space } - await new Promise((resolve, reject) => { + await /** @type {Promise} */ (new Promise((resolve, reject) => { app.post('/profiling/v1/input', upload.any(), (req, res) => { try { verifyRequest(req, profiles, start, end) @@ -216,7 +216,7 @@ describe('exporters/agent', function () { }) exporter.export({ profiles, start, end, tags }).catch(reject) - }) + })) startSpan.getCalls().forEach(call => { const [name, { tags }] = call.args @@ -423,7 +423,7 @@ describe('exporters/agent', function () { space } - await new Promise((resolve, reject) => 
{ + await /** @type {Promise} */ (new Promise((resolve, reject) => { app.post('/profiling/v1/input', upload.any(), (req, res) => { try { verifyRequest(req, profiles, start, end) @@ -436,7 +436,7 @@ describe('exporters/agent', function () { }) exporter.export({ profiles, start, end, tags }).catch(reject) - }) + })) }) }) @@ -473,7 +473,7 @@ describe('exporters/agent', function () { space } - await new Promise((resolve, reject) => { + await /** @type {Promise} */ (new Promise((resolve, reject) => { app.post('/profiling/v1/input', upload.any(), (req, res) => { try { verifyRequest(req, profiles, start, end) @@ -486,7 +486,7 @@ describe('exporters/agent', function () { }) exporter.export({ profiles, start, end, tags }).catch(reject) - }) + })) }) }, { skip: os.platform() === 'win32' }) }) diff --git a/packages/dd-trace/test/profiling/loggers/console.spec.js b/packages/dd-trace/test/profiling/loggers/console.spec.js index 4ff89e12fff..ca7f00567dd 100644 --- a/packages/dd-trace/test/profiling/loggers/console.spec.js +++ b/packages/dd-trace/test/profiling/loggers/console.spec.js @@ -5,25 +5,27 @@ const sinon = require('sinon') require('../../setup/core') -/* eslint-disable no-console */ - describe('loggers/console', () => { let ConsoleLogger + let debugStub + let infoStub + let warnStub + let errorStub beforeEach(() => { - sinon.stub(console, 'debug') - sinon.stub(console, 'info') - sinon.stub(console, 'warn') - sinon.stub(console, 'error') + debugStub = sinon.stub(console, 'debug') + infoStub = sinon.stub(console, 'info') + warnStub = sinon.stub(console, 'warn') + errorStub = sinon.stub(console, 'error') ConsoleLogger = require('../../../src/profiling/loggers/console').ConsoleLogger }) afterEach(() => { - console.debug.restore() - console.info.restore() - console.warn.restore() - console.error.restore() + debugStub.restore() + infoStub.restore() + warnStub.restore() + errorStub.restore() }) it('should call the underlying console for error', () => { @@ -34,11 +36,11 @@ 
describe('loggers/console', () => { logger.info('info') logger.debug('debug') - sinon.assert.calledOnce(console.error) - sinon.assert.calledWith(console.error, 'error') - sinon.assert.notCalled(console.debug) - sinon.assert.notCalled(console.info) - sinon.assert.notCalled(console.warn) + sinon.assert.calledOnce(errorStub) + sinon.assert.calledWith(errorStub, 'error') + sinon.assert.notCalled(debugStub) + sinon.assert.notCalled(infoStub) + sinon.assert.notCalled(warnStub) }) it('should call the underlying console for warn', () => { @@ -49,12 +51,12 @@ describe('loggers/console', () => { logger.info('info') logger.debug('debug') - sinon.assert.calledOnce(console.error) - sinon.assert.calledWith(console.error, 'error') - sinon.assert.calledOnce(console.warn) - sinon.assert.calledWith(console.warn, 'warn') - sinon.assert.notCalled(console.info) - sinon.assert.notCalled(console.debug) + sinon.assert.calledOnce(errorStub) + sinon.assert.calledWith(errorStub, 'error') + sinon.assert.calledOnce(warnStub) + sinon.assert.calledWith(warnStub, 'warn') + sinon.assert.notCalled(infoStub) + sinon.assert.notCalled(debugStub) }) it('should call the underlying console for info', () => { @@ -65,13 +67,13 @@ describe('loggers/console', () => { logger.info('info') logger.debug('debug') - sinon.assert.calledOnce(console.error) - sinon.assert.calledWith(console.error, 'error') - sinon.assert.calledOnce(console.warn) - sinon.assert.calledWith(console.warn, 'warn') - sinon.assert.calledOnce(console.info) - sinon.assert.calledWith(console.info, 'info') - sinon.assert.notCalled(console.debug) + sinon.assert.calledOnce(errorStub) + sinon.assert.calledWith(errorStub, 'error') + sinon.assert.calledOnce(warnStub) + sinon.assert.calledWith(warnStub, 'warn') + sinon.assert.calledOnce(infoStub) + sinon.assert.calledWith(infoStub, 'info') + sinon.assert.notCalled(debugStub) }) it('should call the underlying console for debug', () => { @@ -82,13 +84,13 @@ describe('loggers/console', () => { 
logger.info('info') logger.debug('debug') - sinon.assert.calledOnce(console.error) - sinon.assert.calledWith(console.error, 'error') - sinon.assert.calledOnce(console.warn) - sinon.assert.calledWith(console.warn, 'warn') - sinon.assert.calledOnce(console.info) - sinon.assert.calledWith(console.info, 'info') - sinon.assert.calledOnce(console.debug) - sinon.assert.calledWith(console.debug, 'debug') + sinon.assert.calledOnce(errorStub) + sinon.assert.calledWith(errorStub, 'error') + sinon.assert.calledOnce(warnStub) + sinon.assert.calledWith(warnStub, 'warn') + sinon.assert.calledOnce(infoStub) + sinon.assert.calledWith(infoStub, 'info') + sinon.assert.calledOnce(debugStub) + sinon.assert.calledWith(debugStub, 'debug') }) }) diff --git a/packages/dd-trace/test/profiling/profilers/wall.spec.js b/packages/dd-trace/test/profiling/profilers/wall.spec.js index e35374028ab..925d8da12af 100644 --- a/packages/dd-trace/test/profiling/profilers/wall.spec.js +++ b/packages/dd-trace/test/profiling/profilers/wall.spec.js @@ -39,18 +39,26 @@ describe('profilers/native/wall', () => { // Verify start/stop profiler idle notifiers are created if not present. // These functions may not exist in worker threads. 
+ // @ts-expect-error: _startProfilerIdleNotifier is not typed on process const start = process._startProfilerIdleNotifier + // @ts-expect-error: _stopProfilerIdleNotifier is not typed on process const stop = process._stopProfilerIdleNotifier + // @ts-expect-error: _startProfilerIdleNotifier is not typed on process delete process._startProfilerIdleNotifier + // @ts-expect-error: _stopProfilerIdleNotifier is not typed on process delete process._stopProfilerIdleNotifier profiler.start() + // @ts-expect-error: _startProfilerIdleNotifier is not typed on process expect(process._startProfilerIdleNotifier).to.be.a('function') + // @ts-expect-error: _stopProfilerIdleNotifier is not typed on process expect(process._stopProfilerIdleNotifier).to.be.a('function') + // @ts-expect-error: _startProfilerIdleNotifier is not typed on process process._startProfilerIdleNotifier = start + // @ts-expect-error: _stopProfilerIdleNotifier is not typed on process process._stopProfilerIdleNotifier = stop sinon.assert.calledOnce(pprof.time.start) diff --git a/packages/dd-trace/test/ramdom_sampler.spec.js b/packages/dd-trace/test/ramdom_sampler.spec.js index e49aaada1f1..6b79128f41c 100644 --- a/packages/dd-trace/test/ramdom_sampler.spec.js +++ b/packages/dd-trace/test/ramdom_sampler.spec.js @@ -9,14 +9,15 @@ require('./setup/core') describe('RandomSampler', () => { let RandomSampler let sampler + let randomStub beforeEach(() => { - sinon.stub(Math, 'random') + randomStub = sinon.stub(Math, 'random') RandomSampler = require('../src/random_sampler') }) afterEach(() => { - Math.random.restore() + randomStub.restore() }) describe('rate', () => { @@ -31,7 +32,7 @@ describe('RandomSampler', () => { it('should always sample when rate is 1', () => { sampler = new RandomSampler(1) - Math.random.returns(0.9999999999999999) + randomStub.returns(0.9999999999999999) expect(sampler.isSampled()).to.be.true }) @@ -39,7 +40,7 @@ describe('RandomSampler', () => { it('should never sample when rate is 0', () => 
{ sampler = new RandomSampler(0) - Math.random.returns(0) + randomStub.returns(0) expect(sampler.isSampled()).to.be.false }) @@ -47,11 +48,11 @@ describe('RandomSampler', () => { it('should sample according to the rate', () => { sampler = new RandomSampler(0.1234) - Math.random.returns(0.1233999999999999) + randomStub.returns(0.1233999999999999) expect(sampler.isSampled()).to.be.true - Math.random.returns(0.1234) + randomStub.returns(0.1234) expect(sampler.isSampled()).to.be.false }) diff --git a/packages/dd-trace/test/runtime_metrics.spec.js b/packages/dd-trace/test/runtime_metrics.spec.js index 2891d2d8cfb..b36fbd86f4e 100644 --- a/packages/dd-trace/test/runtime_metrics.spec.js +++ b/packages/dd-trace/test/runtime_metrics.spec.js @@ -425,10 +425,6 @@ function createGarbage (count = 50) { }) describe('Event Loop Utilization', () => { - afterEach(() => { - performance.eventLoopUtilization.restore?.() - }) - it('should calculate utilization correctly with delta values', () => { const firstElu = { idle: 80000000, active: 20000000, utilization: 0.2 } const secondElu = { idle: 100000000, active: 80000000, utilization: 0.4444444444444444 } @@ -438,7 +434,7 @@ function createGarbage (count = 50) { diff = performance.eventLoopUtilization(secondElu, thirdElu) assert.strictEqual(diff.utilization, -0) - sinon.stub(performance, 'eventLoopUtilization') + const eventLoopUtilizationStub = sinon.stub(performance, 'eventLoopUtilization') .onFirstCall().returns(firstElu) .onSecondCall().returns(secondElu) .onThirdCall().returns(thirdElu) @@ -447,7 +443,7 @@ function createGarbage (count = 50) { clock.tick(10000) // Second collection with delta clock.tick(10000) // Second collection with delta - performance.eventLoopUtilization.restore() + eventLoopUtilizationStub.restore() const eluCalls = client.gauge.getCalls().filter(call => call.args[0] === 'runtime.node.event_loop.utilization' @@ -475,11 +471,11 @@ function createGarbage (count = 50) { } } const cpuUsage = process.cpuUsage() - 
sinon.stub(process, 'cpuUsage').returns(cpuUsage) - sinon.stub(performance, 'now').returns(startPerformanceNow + 10000) + const cpuUsageStub = sinon.stub(process, 'cpuUsage').returns(cpuUsage) + const performanceNowStub = sinon.stub(performance, 'now').returns(startPerformanceNow + 10000) clock.tick(10000 - ticks) - performance.now.restore() - process.cpuUsage.restore() + performanceNowStub.restore() + cpuUsageStub.restore() const timeDivisor = 100_000 // Microseconds * 100 for percent @@ -560,17 +556,17 @@ function createGarbage (count = 50) { // On linux performance.now() would return a negative value due to the mocked time. // This is a workaround to ensure the test is deterministic. const startPerformanceNow = Math.max(performance.now(), Math.random() * 1_000_000) - sinon.stub(performance, 'now').returns(startPerformanceNow) + const nowStub = sinon.stub(performance, 'now').returns(startPerformanceNow) clock.tick(10000) - performance.now.restore() + nowStub.restore() const firstUptimeCalls = client.gauge.getCalls() .filter(call => call.args[0] === 'runtime.node.process.uptime') const firstUptime = firstUptimeCalls[0].args[1] client.gauge.resetHistory() - sinon.stub(performance, 'now').returns(startPerformanceNow + 10_000) + const nowStub2 = sinon.stub(performance, 'now').returns(startPerformanceNow + 10_000) clock.tick(10000) // Advance another 10 seconds - performance.now.restore() + nowStub2.restore() let nextUptimeCall = client.gauge.getCalls().filter(call => call.args[0] === 'runtime.node.process.uptime') assert.strictEqual(nextUptimeCall.length, 1) @@ -584,9 +580,9 @@ function createGarbage (count = 50) { ) client.gauge.resetHistory() - sinon.stub(performance, 'now').returns(startPerformanceNow + 20_001) + const nowStub3 = sinon.stub(performance, 'now').returns(startPerformanceNow + 20_001) clock.tick(10000) // Advance another 10 seconds - performance.now.restore() + nowStub3.restore() nextUptimeCall = client.gauge.getCalls().filter(call => call.args[0] === 
'runtime.node.process.uptime') assert.strictEqual(nextUptimeCall.length, 1) @@ -637,9 +633,7 @@ function createGarbage (count = 50) { clock.tick(10000) - process.memoryUsage.restore() - os.totalmem.restore() - os.freemem.restore() + sinon.restore() const metrics = client.gauge.getCalls().reduce((acc, call) => { acc[call.args[0]] = call.args[1] diff --git a/packages/dd-trace/test/setup/operation.js b/packages/dd-trace/test/setup/operation.js index 5448c751b9f..3d4bd641e61 100644 --- a/packages/dd-trace/test/setup/operation.js +++ b/packages/dd-trace/test/setup/operation.js @@ -18,6 +18,7 @@ class RetryOperation extends BaseRetryOperation { super(timeouts, { service }) } + /** @this {{ _options: { service?: string } }} */ retry (error) { const shouldRetry = super.retry(error) @@ -36,4 +37,8 @@ function logAttempt (service, message) { console.error(`[Retrying connection to ${service}] ${message}`) } +/** + * @typedef {new (service: string) => import('retry/lib/retry_operation')} RetryOperationConstructor + */ +/** @type {RetryOperationConstructor} */ module.exports = RetryOperation diff --git a/packages/dd-trace/test/setup/services/memcached.js b/packages/dd-trace/test/setup/services/memcached.js index 34163242755..43ee1cffa89 100644 --- a/packages/dd-trace/test/setup/services/memcached.js +++ b/packages/dd-trace/test/setup/services/memcached.js @@ -4,10 +4,10 @@ const RetryOperation = require('../operation') const Memcached = require('../../../../../versions/memcached').get() function waitForMemcached () { - return new Promise((resolve, reject) => { + return /** @type {Promise} */ (new Promise((resolve, reject) => { const operation = new RetryOperation('memcached') - operation.attempt(currentAttempt => { + operation.attempt(() => { const memcached = new Memcached('localhost:11211', { retries: 0 }) memcached.version((err, version) => { @@ -18,7 +18,7 @@ function waitForMemcached () { resolve() }) }) - }) + })) } module.exports = waitForMemcached diff --git 
a/packages/dd-trace/test/startup-log.spec.js b/packages/dd-trace/test/startup-log.spec.js index 58d887bf8a4..6a41c409855 100644 --- a/packages/dd-trace/test/startup-log.spec.js +++ b/packages/dd-trace/test/startup-log.spec.js @@ -58,11 +58,16 @@ describe('startup logging', () => { 'rule2', new SamplingRule({ name: 'rule3', sampleRate: 1.4 }) ]) + // Use sinon's stub instance directly to avoid type errors + // eslint-disable-next-line no-console + const infoStub = /** @type {sinon.SinonStub} */ (console.info) + // eslint-disable-next-line no-console + const warnStub = /** @type {sinon.SinonStub} */ (console.warn) startupLog({ agentError: { message: 'Error: fake error' } }) - firstStderrCall = console.info.firstCall /* eslint-disable-line no-console */ - secondStderrCall = console.warn.firstCall /* eslint-disable-line no-console */ - console.info.restore() /* eslint-disable-line no-console */ - console.warn.restore() /* eslint-disable-line no-console */ + firstStderrCall = infoStub.firstCall + secondStderrCall = warnStub.firstCall + infoStub.restore() + warnStub.restore() }) it('startupLog should be formatted correctly', () => { @@ -124,8 +129,9 @@ describe('profiling_enabled', () => { setStartupLogPluginManager({ _pluginsByName: {} }) startupLog() /* eslint-disable-next-line no-console */ - const logObj = JSON.parse(console.info.firstCall.args[0].replace('DATADOG TRACER CONFIGURATION - ', '')) - console.info.restore() /* eslint-disable-line no-console */ + const infoStub = /** @type {sinon.SinonStub} */ (console.info) + const logObj = JSON.parse(infoStub.firstCall.args[0].replace('DATADOG TRACER CONFIGURATION - ', '')) + infoStub.restore() expect(logObj.profiling_enabled).to.equal(expected) }) }) diff --git a/packages/dd-trace/test/telemetry/dependencies.spec.js b/packages/dd-trace/test/telemetry/dependencies.spec.js index 0a94f0eb817..fa6fd375fdc 100644 --- a/packages/dd-trace/test/telemetry/dependencies.spec.js +++ 
b/packages/dd-trace/test/telemetry/dependencies.spec.js @@ -13,6 +13,15 @@ const moduleLoadStartChannel = dc.channel('dd-trace:moduleLoadStart') const originalSetImmediate = global.setImmediate describe('dependencies', () => { + function setImmediate2 (callback, ...args) { + return callback(...args) + } + + setImmediate2.__promisify__ = function (...args) { + setImmediate2(() => undefined, ...args) + return Promise.resolve() + } + describe('start', () => { it('should subscribe', () => { const subscribe = sinon.stub() @@ -48,7 +57,8 @@ describe('dependencies', () => { './send-data': { sendData }, '../require-package-json': requirePackageJson }) - global.setImmediate = function (callback) { callback() } + + global.setImmediate = setImmediate2 dependencies.start(config, application, host, getRetryData, updateRetryData) @@ -334,7 +344,7 @@ describe('dependencies', () => { './send-data': { sendData }, '../require-package-json': requirePackageJson }) - global.setImmediate = function (callback) { callback() } + global.setImmediate = setImmediate2 dependencies.start(config, application, host, getRetryData, updateRetryData) @@ -397,7 +407,7 @@ describe('dependencies', () => { './send-data': { sendData }, '../require-package-json': requirePackageJson }) - global.setImmediate = function (callback) { callback() } + global.setImmediate = setImmediate2 dependencies.start(config, application, host, getRetryData, updateRetryData) diff --git a/scripts/install_plugin_modules.js b/scripts/install_plugin_modules.js index 5d15f007fa6..a2190ed3982 100644 --- a/scripts/install_plugin_modules.js +++ b/scripts/install_plugin_modules.js @@ -202,10 +202,33 @@ async function assertIndex (name, version) { const requirePackageJson = require('${requirePackageJsonPath}') module.exports = { + /** + * Load the module. + * + * @param {string} [id] The name/id of the module to get. + * @returns {import('${name}') | never} The module. 
+ */ get (id) { return require(id || '${name}') }, - getPath (id) { return require.resolve(id || '${name}' ) }, + /** + * Resolve the path for a module id. + * + * @param {string} [id] The module id to resolve. + * @returns {string | never} The resolved path. + */ + getPath (id) { return require.resolve(id || '${name}') }, + /** + * Resolve the package.json path for a module id. + * + * @param {string} [id] The module id to resolve. + * @returns {string | never} The resolved package.json path. + */ pkgJsonPath (id) { return require.resolve((id || '${name}') + '/package.json') }, - version () { return requirePackageJson('${name}', module).version } + /** + * Resolve the package's version for a module id. + * + * @returns {string | never} The resolved package's version. + */ + version () { return requirePackageJson('${name}', /** @type {import('module').Module} */ (module)).version } } ` await writeFile(filename(name, version, 'index.js'), index) @@ -273,13 +296,23 @@ function filename (name, version, file) { } /** - * @template {string|null} T - * @param {T} str - * @returns {T extends null ? undefined : T extends string ? 
string : never} + * @overload + * @param {string} str + * @returns {string} + */ +/** + * @overload + * @param {null} str + * @returns {undefined} + */ +/** + * @overload + * @param {string|null} str + * @returns {string|undefined} */ function sha1 (str) { - if (!str) return /** @type {any} */ (undefined) + if (!str) return const shasum = createHash('sha1') shasum.update(str) - return /** @type {any} */ (shasum.digest('hex')) + return shasum.digest('hex') } diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 00000000000..6aa832234ab --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,39 @@ +{ + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "noEmit": true, + + "noImplicitReturns": true, + "strict": true, + "strictBindCallApply": true, + "strictBuiltinIteratorReturn": true, + "strictFunctionTypes": true, + + "alwaysStrict": false, + "exactOptionalPropertyTypes": false, + "forceConsistentCasingInFileNames": false, + "noFallthroughCasesInSwitch": false, + "noImplicitAny": false, + "noImplicitOverride": false, + "noImplicitThis": false, + "noPropertyAccessFromIndexSignature": false, + "noUncheckedIndexedAccess": false, + "useUnknownInCatchVariables": false, + + "target": "ES2022", + "lib": ["ES2022"], + "module": "Node16", + "moduleResolution": "node16", + "moduleDetection": "force", + "resolveJsonModule": true, + "verbatimModuleSyntax": true, + "baseUrl": ".", + + "types": ["node"], + "typeRoots": ["./types", "./node_modules/@types"], + "skipLibCheck": false + }, + "include": ["packages/**/*.js", "packages/**/*.mjs"], + "exclude": ["node_modules"] +} diff --git a/version.js b/version.js index abd20badf48..2fa9b18d277 100644 --- a/version.js +++ b/version.js @@ -1,8 +1,10 @@ 'use strict' var version = require('./package.json').version -var ddMatches = version.match(/^(\d+)\.(\d+)\.(\d+)/) -var nodeMatches = process.versions.node.match(/^(\d+)\.(\d+)\.(\d+)/) +// @ts-expect-error +var /** @type {RegExpMatchArray} */ ddMatches = 
version.match(/^(\d+)\.(\d+)\.(\d+)/) +// @ts-expect-error +var /** @type {RegExpMatchArray} */ nodeMatches = process.versions.node.match(/^(\d+)\.(\d+)\.(\d+)/) module.exports = { VERSION: version, diff --git a/yarn.lock b/yarn.lock index eb0349b652f..e58cbf882a7 100644 --- a/yarn.lock +++ b/yarn.lock @@ -796,6 +796,11 @@ resolved "https://registry.yarnpkg.com/@types/aws-lambda/-/aws-lambda-8.10.152.tgz#f68424a8175f0a54a2a941e65b76c3f51f3bd89d" integrity sha512-soT/c2gYBnT5ygwiHPmd9a1bftj462NWVk2tKCc1PYHSIacB2UwbTS2zYG4jzag1mRDuzg/OjtxQjQ2NKRB6Rw== +"@types/chai@^4.3.16": + version "4.3.20" + resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.3.20.tgz#cb291577ed342ca92600430841a00329ba05cecc" + integrity sha512-/pC9HAB5I/xMlc5FP77qjCnI16ChlJfW0tGa0IUcFn38VJrTV6DeZ60NU5KZBtaOZqjdpwTWohz5HU1RrhiYxQ== + "@types/estree@^1.0.6": version "1.0.8" resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.8.tgz#958b91c991b1867ced318bedea0e215ee050726e" @@ -811,12 +816,17 @@ resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" integrity sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ== -"@types/node@>=13.7.0": - version "22.13.10" - resolved "https://registry.yarnpkg.com/@types/node/-/node-22.13.10.tgz#df9ea358c5ed991266becc3109dc2dc9125d77e4" - integrity sha512-I6LPUvlRH+O6VRUqYOcMudhaIdUVWfsjnZavnsraHvpBwaEyMN29ry+0UVJhImYL16xsscu0aske3yA+uPOWfw== +"@types/mocha@^10.0.10": + version "10.0.10" + resolved "https://registry.yarnpkg.com/@types/mocha/-/mocha-10.0.10.tgz#91f62905e8d23cbd66225312f239454a23bebfa0" + integrity sha512-xPyYSz1cMPnJQhl0CLMH68j3gprKZaTjG3s5Vi+fDgx+uhG9NOXwbVt52eFS8ECyXhyKcjDLCBEqBExKuiZb7Q== + +"@types/node@*", "@types/node@>=13.7.0": + version "24.5.1" + resolved "https://registry.yarnpkg.com/@types/node/-/node-24.5.1.tgz#dab6917c47113eb4502d27d06e89a407ec0eff95" + integrity 
sha512-/SQdmUP2xa+1rdx7VwB9yPq8PaKej8TD5cQ+XfKDPWWC+VDJU4rvVVagXqKUzhKjtFoNA8rXDJAkCxQPAe00+Q== dependencies: - undici-types "~6.20.0" + undici-types "~7.12.0" "@types/node@^18.19.106": version "18.19.127" @@ -844,6 +854,25 @@ resolved "https://registry.yarnpkg.com/@types/scheduler/-/scheduler-0.16.8.tgz#ce5ace04cfeabe7ef87c0091e50752e36707deff" integrity sha512-WZLiwShhwLRmeV6zH+GkbOFT6Z6VklCItrDioxUnv+u4Ll+8vKeFySoFyK/0ctcRpOmwAicELfmys1sDc/Rw+A== +"@types/sinon@^17.0.4": + version "17.0.4" + resolved "https://registry.yarnpkg.com/@types/sinon/-/sinon-17.0.4.tgz#fd9a3e8e07eea1a3f4a6f82a972c899e5778f369" + integrity sha512-RHnIrhfPO3+tJT0s7cFaXGZvsL4bbR3/k7z3P312qMS4JaS2Tk+KiwiLx1S0rQ56ERj00u1/BtdyVd0FY+Pdew== + dependencies: + "@types/sinonjs__fake-timers" "*" + +"@types/sinonjs__fake-timers@*": + version "8.1.5" + resolved "https://registry.yarnpkg.com/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-8.1.5.tgz#5fd3592ff10c1e9695d377020c033116cc2889f2" + integrity sha512-mQkU2jY8jJEF7YHjHvsQO8+3ughTL1mcnn96igfhONmR+fUPSKIkefQYpSe8bsly2Ep7oQbn/6VG5/9/0qcArQ== + +"@types/tap@^15.0.12": + version "15.0.12" + resolved "https://registry.yarnpkg.com/@types/tap/-/tap-15.0.12.tgz#e91b6a2e949d18f71e14bda421b9044c75d3fb0f" + integrity sha512-QuVlxQEBOBASkirrwp0ciwO9stIzOdRMHyaYYsexeVSAYwR4sq+YIYaQbVaYXSXi8+yPf22ZZNieRCB8KAJrTA== + dependencies: + "@types/node" "*" + "@types/yoga-layout@1.9.2": version "1.9.2" resolved "https://registry.yarnpkg.com/@types/yoga-layout/-/yoga-layout-1.9.2.tgz#efaf9e991a7390dc081a0b679185979a83a9639a" @@ -4798,6 +4827,11 @@ typedarray@^0.0.6: resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" integrity sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA== +typescript@^5.9.2: + version "5.9.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.9.2.tgz#d93450cddec5154a2d5cabe3b8102b83316fb2a6" + integrity 
sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A== + unbox-primitive@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.1.0.tgz#8d9d2c9edeea8460c7f35033a88867944934d1e2" @@ -4813,10 +4847,10 @@ undici-types@~5.26.4: resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-5.26.5.tgz#bcd539893d00b56e964fd2657a4866b221a65617" integrity sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA== -undici-types@~6.20.0: - version "6.20.0" - resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-6.20.0.tgz#8171bf22c1f588d1554d55bf204bc624af388433" - integrity sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg== +undici-types@~7.12.0: + version "7.12.0" + resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-7.12.0.tgz#15c5c7475c2a3ba30659529f5cdb4674b622fafb" + integrity sha512-goOacqME2GYyOZZfb5Lgtu+1IDmAlAEu5xnD3+xTzS10hT0vzpf0SPjkXwAw9Jm+4n/mQGDP3LO8CPbYROeBfQ== unicode-length@^2.0.2: version "2.1.0"