From bd182e603e85f40c9809fa7fab21e7164c62d3d7 Mon Sep 17 00:00:00 2001
From: Nathan Baulch
Date: Sun, 22 Sep 2024 15:03:43 +1000
Subject: [PATCH] test: fix typos

---
 .../non-node-context/test-make-buffer.js | 2 +-
 test/addons/openssl-providers/providers.cjs | 2 +-
 ...readline.js => test-benchmark-readline.js} | 0
 test/common/index.js | 8 +-
 test/common/report.js | 2 +-
 test/es-module/test-typescript.mjs | 2 +-
 test/es-module/test-wasm-web-api.js | 4 +-
 test/fixtures/dotenv/basic-valid.env | 2 +-
 test/fixtures/guess-hash-seed.js | 8 +-
 .../node_modules/commander/lib/help.js | 6 +-
 .../node_modules/postject/README.markdown | 4 +-
 .../node_modules/postject/dist/api.js | 4 +-
 test/fixtures/snapshot/marked.js | 12 +-
 test/fixtures/snapshot/typescript.js | 278 +++++++++---------
 .../test-runner/coverage-loader/hooks.mjs | 2 +-
 .../test-runner/output/skip_pattern.js | 2 +-
 .../wpt/FileAPI/BlobURL/test2-manual.html | 2 +-
 .../Determining-Encoding.any.js | 12 +-
 .../FileAPI/support/send-file-form-helper.js | 2 +-
 .../support/send-file-formdata-helper.js | 2 +-
 .../sign_verify/eddsa_small_order_points.js | 2 +-
 .../wpt/common/security-features/README.md | 2 +-
 ...decompression-split-chunk.tentative.any.js | 6 +-
 .../overscroll-event-fired-to-window.html | 2 +-
 .../dom/events/scrolling/scroll_support.js | 2 +-
 .../cached-image-gets-single-entry.html | 2 +-
 .../resource-timing/initiator-type/link.html | 2 +-
 .../resources/connection-reuse-test.js | 4 +-
 test/fixtures/wpt/resources/channel.sub.js | 2 +-
 test/fixtures/wpt/resources/idlharness.js | 2 +-
 .../wpt/resources/testdriver-actions.js | 4 +-
 test/fixtures/wpt/resources/testharness.js | 6 +-
 .../wpt/resources/webidl2/lib/webidl2.js | 4 +-
 .../transfer-with-messageport.window.js | 6 +-
 test/fixtures/wpt/url/a-element.html | 4 +-
 .../wpt/user-timing/measure-exceptions.html | 2 +-
 .../wpt/user-timing/measure-l3.any.js | 2 +-
 .../broadcastchannel/workers.html | 2 +-
 ...opens-3P-window.partitioned.tentative.html | 2 +-
 .../wpt/webstorage/storage_getitem.window.js | 2 +-
 .../wpt/webstorage/storage_length.window.js | 2 +-
 test/internet/test-uv-threadpool-schedule.js | 2 +-
 .../test_function/test_function.c | 2 +-
 test/js-native-api/test_object/test_object.c | 2 +-
 test/js-native-api/test_reference/test.js | 2 +-
 test/js-native-api/test_string/test_string.c | 2 +-
 .../test_init_order/test_init_order.cc | 2 +-
 .../test_instance_data/test_instance_data.c | 2 +-
 test/parallel/parallel.status | 2 +-
 test/parallel/test-blob-file-backed.js | 2 +-
 test/parallel/test-cli-options-negation.js | 2 +-
 .../parallel/test-compile-cache-api-tmpdir.js | 2 +-
 test/parallel/test-crypto-x509.js | 2 +-
 .../test-debugger-set-context-line-number.mjs | 4 +-
 test/parallel/test-fs-readv-promises.js | 16 +-
 test/parallel/test-fs-readv-sync.js | 12 +-
 test/parallel/test-fs-readv.js | 18 +-
 .../test-fs-watch-recursive-update-file.js | 2 +-
 .../test-inspector-break-when-eval.js | 2 +-
 test/parallel/test-net-listen-twice.js | 2 +-
 test/parallel/test-net-pipe-with-long-path.js | 2 +-
 .../test-process-exit-code-validation.js | 4 +-
 test/parallel/test-runner-reporters.js | 22 +-
 ...t-unhandled-exception-with-worker-inuse.js | 2 +-
 test/parallel/test-url-relative.js | 2 +-
 test/parallel/test-v8-query-objects.js | 2 +-
 ...-worker-message-port-transfer-duplicate.js | 2 +-
 .../test-worker-message-port-wasm-threads.js | 2 +-
 test/sequential/test-error-serdes.js | 8 +-
 .../test-performance-eventloopdelay.js | 2 +-
 test/sequential/test-worker-eventlooputil.js | 2 +-
test/sequential/test-worker-prof.js | 2 +- 72 files changed, 275 insertions(+), 275 deletions(-) rename test/benchmark/{test-bechmark-readline.js => test-benchmark-readline.js} (100%) diff --git a/test/addons/non-node-context/test-make-buffer.js b/test/addons/non-node-context/test-make-buffer.js index 344ea973d76c0c..26458137b05bca 100644 --- a/test/addons/non-node-context/test-make-buffer.js +++ b/test/addons/non-node-context/test-make-buffer.js @@ -6,7 +6,7 @@ const { makeBufferInNewContext, } = require(`./build/${common.buildType}/binding`); -// Because the `Buffer` function and its protoype property only (currently) +// Because the `Buffer` function and its prototype property only (currently) // exist in a Node.js instance’s main context, trying to create buffers from // another context throws an exception. assert.throws( diff --git a/test/addons/openssl-providers/providers.cjs b/test/addons/openssl-providers/providers.cjs index 901ea5041ba34e..2dabbf020e2a41 100644 --- a/test/addons/openssl-providers/providers.cjs +++ b/test/addons/openssl-providers/providers.cjs @@ -13,7 +13,7 @@ const { getProviders } = require(`./build/${common.buildType}/binding`); // For the providers defined here, the expectation is that the listed ciphers // and hash algorithms are only provided by the named provider. These are for -// basic checks and are not intended to list evey cipher or hash algorithm +// basic checks and are not intended to list every cipher or hash algorithm // supported by the provider. const providers = { 'default': { diff --git a/test/benchmark/test-bechmark-readline.js b/test/benchmark/test-benchmark-readline.js similarity index 100% rename from test/benchmark/test-bechmark-readline.js rename to test/benchmark/test-benchmark-readline.js diff --git a/test/common/index.js b/test/common/index.js index dd5f43c8664eb8..e7871dabca4b90 100644 --- a/test/common/index.js +++ b/test/common/index.js @@ -171,7 +171,7 @@ const buildType = process.config.target_defaults ? // If env var is set then enable async_hook hooks for all tests. 
if (process.env.NODE_TEST_WITH_ASYNC_HOOKS) { - const destroydIdsList = {}; + const destroyIdsList = {}; const destroyListList = {}; const initHandles = {}; const { internalBinding } = require('internal/test/binding'); @@ -210,12 +210,12 @@ if (process.env.NODE_TEST_WITH_ASYNC_HOOKS) { before() { }, after() { }, destroy(id) { - if (destroydIdsList[id] !== undefined) { - process._rawDebug(destroydIdsList[id]); + if (destroyIdsList[id] !== undefined) { + process._rawDebug(destroyIdsList[id]); process._rawDebug(); throw new Error(`destroy called for same id (${id})`); } - destroydIdsList[id] = inspect(new Error()); + destroyIdsList[id] = inspect(new Error()); }, }).enable(); } diff --git a/test/common/report.js b/test/common/report.js index 6e41561186570d..6fce96590c3f54 100644 --- a/test/common/report.js +++ b/test/common/report.js @@ -251,7 +251,7 @@ function _validateContent(report, fields = []) { assert(typeof usage.free_memory, 'string'); assert(typeof usage.total_memory, 'string'); assert(typeof usage.available_memory, 'string'); - // This field may not exsit + // This field may not exist if (report.resourceUsage.constrained_memory) { assert(typeof report.resourceUsage.constrained_memory, 'string'); } diff --git a/test/es-module/test-typescript.mjs b/test/es-module/test-typescript.mjs index 496e42178f4a3e..682398be422a44 100644 --- a/test/es-module/test-typescript.mjs +++ b/test/es-module/test-typescript.mjs @@ -159,7 +159,7 @@ test('execute a TypeScript file with type definition but no type keyword', async strictEqual(result.code, 1); }); -test('execute a TypeScript file with type definition but no type keyword with default-type modue', async () => { +test('execute a TypeScript file with type definition but no type keyword with default-type module', async () => { const result = await spawnPromisified(process.execPath, [ '--experimental-strip-types', '--experimental-default-type=module', diff --git a/test/es-module/test-wasm-web-api.js b/test/es-module/test-wasm-web-api.js index d4a81794f80eb3..b199393a18c370 100644 --- a/test/es-module/test-wasm-web-api.js +++ b/test/es-module/test-wasm-web-api.js @@ -173,7 +173,7 @@ function testCompileStreamingRejectionUsingFetch(responseCallback, rejection) { "'application/octet-stream'" }); - // HTTP status code indiciating an error. + // HTTP status code indicating an error. await testCompileStreamingRejectionUsingFetch((res) => { res.statusCode = 418; res.setHeader('Content-Type', 'application/wasm'); @@ -184,7 +184,7 @@ function testCompileStreamingRejectionUsingFetch(responseCallback, rejection) { message: /^WebAssembly response has status code 418$/ }); - // HTTP status code indiciating an error, but using a Response whose body is + // HTTP status code indicating an error, but using a Response whose body is // a Buffer instead of calling fetch(). 
await testCompileStreamingSuccess(() => { return Promise.resolve(new Response(simpleWasmBytes, { diff --git a/test/fixtures/dotenv/basic-valid.env b/test/fixtures/dotenv/basic-valid.env index 8b7407914c7404..978b9c6fda3bf3 100644 --- a/test/fixtures/dotenv/basic-valid.env +++ b/test/fixtures/dotenv/basic-valid.env @@ -1 +1 @@ -BASIC=overriden +BASIC=overridden diff --git a/test/fixtures/guess-hash-seed.js b/test/fixtures/guess-hash-seed.js index c6166450b4fbb5..13399ef5318239 100644 --- a/test/fixtures/guess-hash-seed.js +++ b/test/fixtures/guess-hash-seed.js @@ -81,7 +81,7 @@ function time_set_lookup(set, value) { // Set with 256 buckets; bucket 0 full, others empty const tester_set_buckets = 256; const tester_set = new Set(); -let tester_set_treshold; +let tester_set_threshold; (function() { // fill bucket 0 and find extra numbers mapping to bucket 0 and a different // bucket `capacity == numBuckets * 2` @@ -114,9 +114,9 @@ let tester_set_treshold; const neg_time = Math.min(...run_repeated(10000, time_set_lookup.bind(null, tester_set, negative_test_value))); - tester_set_treshold = (pos_time + neg_time) / 2; + tester_set_threshold = (pos_time + neg_time) / 2; // console.log(`pos_time: ${pos_time}, neg_time: ${neg_time},`, - // `threshold: ${tester_set_treshold}`); + // `threshold: ${tester_set_threshold}`); })(); // determine hash seed @@ -126,7 +126,7 @@ const slow_str_gen = (function*() { while (1) { const str = `#${strgen_i++}`; for (let i = 0; i < 1000; i++) { - if (time_set_lookup(tester_set, str) < tester_set_treshold) + if (time_set_lookup(tester_set, str) < tester_set_threshold) continue outer; } yield str; diff --git a/test/fixtures/postject-copy/node_modules/commander/lib/help.js b/test/fixtures/postject-copy/node_modules/commander/lib/help.js index 90d9d68cc7c875..d3d05ef4b585cc 100644 --- a/test/fixtures/postject-copy/node_modules/commander/lib/help.js +++ b/test/fixtures/postject-copy/node_modules/commander/lib/help.js @@ -328,11 +328,11 @@ class Help { extraInfo.push(`default: ${argument.defaultValueDescription || JSON.stringify(argument.defaultValue)}`); } if (extraInfo.length > 0) { - const extraDescripton = `(${extraInfo.join(', ')})`; + const extraDescription = `(${extraInfo.join(', ')})`; if (argument.description) { - return `${argument.description} ${extraDescripton}`; + return `${argument.description} ${extraDescription}`; } - return extraDescripton; + return extraDescription; } return argument.description; } diff --git a/test/fixtures/postject-copy/node_modules/postject/README.markdown b/test/fixtures/postject-copy/node_modules/postject/README.markdown index 4acacb20cb3d33..32b3cdfa7723e0 100644 --- a/test/fixtures/postject-copy/node_modules/postject/README.markdown +++ b/test/fixtures/postject-copy/node_modules/postject/README.markdown @@ -34,7 +34,7 @@ Options: -h, --help display help for command ``` -### Using Programatically +### Using Programmatically ```js const { inject } = require('postject'); @@ -67,7 +67,7 @@ $ npm test ## Design -To ensure maximum capatibility and head off unforeseen issues, the +To ensure maximum compatibility and head off unforeseen issues, the implementation for each format tries to use that format's standard practices for embedding binary data. As such, it should be possible to embed the binary data at build-time as well. 
The CLI provides the diff --git a/test/fixtures/postject-copy/node_modules/postject/dist/api.js b/test/fixtures/postject-copy/node_modules/postject/dist/api.js index 361f45691a4aed..674b744dfbd943 100644 --- a/test/fixtures/postject-copy/node_modules/postject/dist/api.js +++ b/test/fixtures/postject-copy/node_modules/postject/dist/api.js @@ -3213,7 +3213,7 @@ var require_postject = __commonJS({ } function replacePublicSymbol(name, value, numArguments) { if (!Module2.hasOwnProperty(name)) { - throwInternalError("Replacing nonexistant public symbol"); + throwInternalError("Replacing nonexistent public symbol"); } if (void 0 !== Module2[name].overloadTable && void 0 !== numArguments) { Module2[name].overloadTable[numArguments] = value; @@ -5151,7 +5151,7 @@ Use --overwrite to overwrite the existing content` const lastSentinel = buffer.lastIndexOf(sentinelFuse); if (firstSentinel !== lastSentinel) { throw new Error( - `Multiple occurences of sentinel "${sentinelFuse}" found in the binary` + `Multiple occurrences of sentinel "${sentinelFuse}" found in the binary` ); } const colonIndex = firstSentinel + sentinelFuse.length; diff --git a/test/fixtures/snapshot/marked.js b/test/fixtures/snapshot/marked.js index b72d59461b8e1c..727d9f48fca02b 100644 --- a/test/fixtures/snapshot/marked.js +++ b/test/fixtures/snapshot/marked.js @@ -617,7 +617,7 @@ } else { indent = cap[2].search(/[^ ]/); // Find first non-space char - indent = cap[1].length + (indent > 4 ? 1 : indent); // intented code blocks after 4 spaces; indent is always 1 + indent = cap[1].length + (indent > 4 ? 1 : indent); // indented code blocks after 4 spaces; indent is always 1 itemContents = lines[0].slice(indent - cap[1].length); } @@ -2177,19 +2177,19 @@ _proto.getNextSafeSlug = function getNextSafeSlug(originalSlug, isDryRun) { var slug = originalSlug; - var occurenceAccumulator = 0; + var occurrenceAccumulator = 0; if (this.seen.hasOwnProperty(slug)) { - occurenceAccumulator = this.seen[originalSlug]; + occurrenceAccumulator = this.seen[originalSlug]; do { - occurenceAccumulator++; - slug = originalSlug + '-' + occurenceAccumulator; + occurrenceAccumulator++; + slug = originalSlug + '-' + occurrenceAccumulator; } while (this.seen.hasOwnProperty(slug)); } if (!isDryRun) { - this.seen[originalSlug] = occurenceAccumulator; + this.seen[originalSlug] = occurrenceAccumulator; this.seen[slug] = 0; } diff --git a/test/fixtures/snapshot/typescript.js b/test/fixtures/snapshot/typescript.js index 2a4b9f5e84dfbb..d60c2f0eeadccf 100644 --- a/test/fixtures/snapshot/typescript.js +++ b/test/fixtures/snapshot/typescript.js @@ -1903,7 +1903,7 @@ var ts; * This function is used in places where we want to make file name as a key on these systems * It is possible on mac to be able to refer to file name with I with dot on top as a fileName with its lower case form * But on windows we cannot. 
Windows can have fileName with I with dot on top next to its lower case and they can not each be referred with the lowercase forms - * Technically we would want this function to be platform sepcific as well but + * Technically we would want this function to be platform specific as well but * our api has till now only taken caseSensitive as the only input and just for some characters we dont want to update API and ensure all customers use those api * We could use upper case and we would still need to deal with the descripencies but * we want to continue using lower case since in most cases filenames are lowercasewe and wont need any case changes and avoid having to store another string for the key @@ -4857,8 +4857,8 @@ var ts; // Error handling NodeBuilderFlags[NodeBuilderFlags["AllowThisInObjectLiteral"] = 32768] = "AllowThisInObjectLiteral"; NodeBuilderFlags[NodeBuilderFlags["AllowQualifiedNameInPlaceOfIdentifier"] = 65536] = "AllowQualifiedNameInPlaceOfIdentifier"; - /** @deprecated AllowQualifedNameInPlaceOfIdentifier. Use AllowQualifiedNameInPlaceOfIdentifier instead. */ - NodeBuilderFlags[NodeBuilderFlags["AllowQualifedNameInPlaceOfIdentifier"] = 65536] = "AllowQualifedNameInPlaceOfIdentifier"; + /** @deprecated AllowQualifiedNameInPlaceOfIdentifier. Use AllowQualifiedNameInPlaceOfIdentifier instead. */ + NodeBuilderFlags[NodeBuilderFlags["AllowQualifiedNameInPlaceOfIdentifier"] = 65536] = "AllowQualifiedNameInPlaceOfIdentifier"; NodeBuilderFlags[NodeBuilderFlags["AllowAnonymousIdentifier"] = 131072] = "AllowAnonymousIdentifier"; NodeBuilderFlags[NodeBuilderFlags["AllowEmptyUnionOrIntersection"] = 262144] = "AllowEmptyUnionOrIntersection"; NodeBuilderFlags[NodeBuilderFlags["AllowEmptyTuple"] = 524288] = "AllowEmptyTuple"; @@ -5258,7 +5258,7 @@ var ts; /* @internal */ ObjectFlags[ObjectFlags["ContainsObjectOrArrayLiteral"] = 131072] = "ContainsObjectOrArrayLiteral"; /* @internal */ - ObjectFlags[ObjectFlags["NonInferrableType"] = 262144] = "NonInferrableType"; + ObjectFlags[ObjectFlags["NonInferableType"] = 262144] = "NonInferableType"; /* @internal */ ObjectFlags[ObjectFlags["CouldContainTypeVariablesComputed"] = 524288] = "CouldContainTypeVariablesComputed"; /* @internal */ @@ -5472,7 +5472,7 @@ var ts; // Starting with node12, node's module resolver has significant departures from traditional cjs resolution // to better support ecmascript modules and their use within node - however more features are still being added. // TypeScript's Node ESM support was introduced after Node 12 went end-of-life, and Node 14 is the earliest stable - // version that supports both pattern trailers - *but*, Node 16 is the first version that also supports ECMASCript 2022. + // version that supports both pattern trailers - *but*, Node 16 is the first version that also supports ECMAScript 2022. // In turn, we offer both a `NodeNext` moving resolution target, and a `Node16` version-anchored resolution target ModuleResolutionKind[ModuleResolutionKind["Node16"] = 3] = "Node16"; ModuleResolutionKind[ModuleResolutionKind["NodeNext"] = 99] = "NodeNext"; @@ -7188,7 +7188,7 @@ var ts; if (platform === "win32" || platform === "win64") { return false; } - // If this file exists under a different case, we must be case-insensitve. + // If this file exists under a different case, we must be case-insensitive. 
return !fileExists(swapCase(__filename)); } /** Convert all lowercase chars to uppercase, and vice-versa */ @@ -8223,8 +8223,8 @@ var ts; /* @internal */ var ts; (function (ts) { - function diag(code, category, key, message, reportsUnnecessary, elidedInCompatabilityPyramid, reportsDeprecated) { - return { code: code, category: category, key: key, message: message, reportsUnnecessary: reportsUnnecessary, elidedInCompatabilityPyramid: elidedInCompatabilityPyramid, reportsDeprecated: reportsDeprecated }; + function diag(code, category, key, message, reportsUnnecessary, elidedInCompatibilityPyramid, reportsDeprecated) { + return { code: code, category: category, key: key, message: message, reportsUnnecessary: reportsUnnecessary, elidedInCompatibilityPyramid: elidedInCompatibilityPyramid, reportsDeprecated: reportsDeprecated }; } ts.Diagnostics = { Unterminated_string_literal: diag(1002, ts.DiagnosticCategory.Error, "Unterminated_string_literal_1002", "Unterminated string literal."), @@ -8597,10 +8597,10 @@ var ts; auto_Colon_Treat_files_with_imports_exports_import_meta_jsx_with_jsx_Colon_react_jsx_or_esm_format_with_module_Colon_node16_as_modules: diag(1476, ts.DiagnosticCategory.Message, "auto_Colon_Treat_files_with_imports_exports_import_meta_jsx_with_jsx_Colon_react_jsx_or_esm_format_w_1476", "\"auto\": Treat files with imports, exports, import.meta, jsx (with jsx: react-jsx), or esm format (with module: node16+) as modules."), The_types_of_0_are_incompatible_between_these_types: diag(2200, ts.DiagnosticCategory.Error, "The_types_of_0_are_incompatible_between_these_types_2200", "The types of '{0}' are incompatible between these types."), The_types_returned_by_0_are_incompatible_between_these_types: diag(2201, ts.DiagnosticCategory.Error, "The_types_returned_by_0_are_incompatible_between_these_types_2201", "The types returned by '{0}' are incompatible between these types."), - Call_signature_return_types_0_and_1_are_incompatible: diag(2202, ts.DiagnosticCategory.Error, "Call_signature_return_types_0_and_1_are_incompatible_2202", "Call signature return types '{0}' and '{1}' are incompatible.", /*reportsUnnecessary*/ undefined, /*elidedInCompatabilityPyramid*/ true), - Construct_signature_return_types_0_and_1_are_incompatible: diag(2203, ts.DiagnosticCategory.Error, "Construct_signature_return_types_0_and_1_are_incompatible_2203", "Construct signature return types '{0}' and '{1}' are incompatible.", /*reportsUnnecessary*/ undefined, /*elidedInCompatabilityPyramid*/ true), - Call_signatures_with_no_arguments_have_incompatible_return_types_0_and_1: diag(2204, ts.DiagnosticCategory.Error, "Call_signatures_with_no_arguments_have_incompatible_return_types_0_and_1_2204", "Call signatures with no arguments have incompatible return types '{0}' and '{1}'.", /*reportsUnnecessary*/ undefined, /*elidedInCompatabilityPyramid*/ true), - Construct_signatures_with_no_arguments_have_incompatible_return_types_0_and_1: diag(2205, ts.DiagnosticCategory.Error, "Construct_signatures_with_no_arguments_have_incompatible_return_types_0_and_1_2205", "Construct signatures with no arguments have incompatible return types '{0}' and '{1}'.", /*reportsUnnecessary*/ undefined, /*elidedInCompatabilityPyramid*/ true), + Call_signature_return_types_0_and_1_are_incompatible: diag(2202, ts.DiagnosticCategory.Error, "Call_signature_return_types_0_and_1_are_incompatible_2202", "Call signature return types '{0}' and '{1}' are incompatible.", /*reportsUnnecessary*/ undefined, /*elidedInCompatibilityPyramid*/ true), + 
Construct_signature_return_types_0_and_1_are_incompatible: diag(2203, ts.DiagnosticCategory.Error, "Construct_signature_return_types_0_and_1_are_incompatible_2203", "Construct signature return types '{0}' and '{1}' are incompatible.", /*reportsUnnecessary*/ undefined, /*elidedInCompatibilityPyramid*/ true), + Call_signatures_with_no_arguments_have_incompatible_return_types_0_and_1: diag(2204, ts.DiagnosticCategory.Error, "Call_signatures_with_no_arguments_have_incompatible_return_types_0_and_1_2204", "Call signatures with no arguments have incompatible return types '{0}' and '{1}'.", /*reportsUnnecessary*/ undefined, /*elidedInCompatibilityPyramid*/ true), + Construct_signatures_with_no_arguments_have_incompatible_return_types_0_and_1: diag(2205, ts.DiagnosticCategory.Error, "Construct_signatures_with_no_arguments_have_incompatible_return_types_0_and_1_2205", "Construct signatures with no arguments have incompatible return types '{0}' and '{1}'.", /*reportsUnnecessary*/ undefined, /*elidedInCompatibilityPyramid*/ true), The_type_modifier_cannot_be_used_on_a_named_import_when_import_type_is_used_on_its_import_statement: diag(2206, ts.DiagnosticCategory.Error, "The_type_modifier_cannot_be_used_on_a_named_import_when_import_type_is_used_on_its_import_statement_2206", "The 'type' modifier cannot be used on a named import when 'import type' is used on its import statement."), The_type_modifier_cannot_be_used_on_a_named_export_when_export_type_is_used_on_its_export_statement: diag(2207, ts.DiagnosticCategory.Error, "The_type_modifier_cannot_be_used_on_a_named_export_when_export_type_is_used_on_its_export_statement_2207", "The 'type' modifier cannot be used on a named export when 'export type' is used on its export statement."), The_project_root_is_ambiguous_but_is_required_to_resolve_export_map_entry_0_in_file_1_Supply_the_rootDir_compiler_option_to_disambiguate: diag(2209, ts.DiagnosticCategory.Error, "The_project_root_is_ambiguous_but_is_required_to_resolve_export_map_entry_0_in_file_1_Supply_the_roo_2209", "The project root is ambiguous, but is required to resolve export map entry '{0}' in file '{1}'. Supply the `rootDir` compiler option to disambiguate."), @@ -20227,7 +20227,7 @@ var ts; case ts.ModuleDetectionKind.Auto: // If module is nodenext or node16, all esm format files are modules // If jsx is react-jsx or react-jsxdev then jsx tags force module-ness - // otherwise, the presence of import or export statments (or import.meta) implies module-ness + // otherwise, the presence of import or export statements (or import.meta) implies module-ness var checks = [ts.isFileProbablyExternalModule]; if (options.jsx === 4 /* JsxEmit.ReactJSX */ || options.jsx === 5 /* JsxEmit.ReactJSXDev */) { checks.push(isFileModuleFromUsingJSXTag); @@ -20464,7 +20464,7 @@ var ts; return isDirectory ? [ts.getPathFromPathComponents(aParts), ts.getPathFromPathComponents(bParts)] : undefined; } // KLUDGE: Don't assume one 'node_modules' links to another. More likely a single directory inside the node_modules is the symlink. - // ALso, don't assume that an `@foo` directory is linked. More likely the contents of that are linked. + // Also, don't assume that an `@foo` directory is linked. More likely the contents of that are linked. 
function isNodeModulesOrScopedPackageDirectory(s, getCanonicalFileName) { return s !== undefined && (getCanonicalFileName(s) === "node_modules" || ts.startsWith(s, "@")); } @@ -20783,7 +20783,7 @@ var ts; } ts.getScriptKindFromFileName = getScriptKindFromFileName; /** - * Groups of supported extensions in order of file resolution precedence. (eg, TS > TSX > DTS and seperately, CTS > DCTS) + * Groups of supported extensions in order of file resolution precedence. (eg, TS > TSX > DTS and separately, CTS > DCTS) */ ts.supportedTSExtensions = [[".ts" /* Extension.Ts */, ".tsx" /* Extension.Tsx */, ".d.ts" /* Extension.Dts */], [".cts" /* Extension.Cts */, ".d.cts" /* Extension.Dcts */], [".mts" /* Extension.Mts */, ".d.mts" /* Extension.Dmts */]]; ts.supportedTSExtensionsFlat = ts.flatten(ts.supportedTSExtensions); @@ -21774,7 +21774,7 @@ var ts; : branch; } /** - * [Per the spec](https://tc39.github.io/ecma262/#prod-ExportDeclaration), `export default` accepts _AssigmentExpression_ but + * [Per the spec](https://tc39.github.io/ecma262/#prod-ExportDeclaration), `export default` accepts _AssignmentExpression_ but * has a lookahead restriction for `function`, `async function`, and `class`. * * Basically, that means we need to parenthesize in the following cases: @@ -21829,15 +21829,15 @@ var ts; // TODO(rbuckton): Verify whether this assertion holds. return expression; } - // TODO(rbuckton): Verifiy whether `setTextRange` is needed. + // TODO(rbuckton): Verify whether `setTextRange` is needed. return ts.setTextRange(factory.createParenthesizedExpression(expression), expression); } function parenthesizeOperandOfPostfixUnary(operand) { - // TODO(rbuckton): Verifiy whether `setTextRange` is needed. + // TODO(rbuckton): Verify whether `setTextRange` is needed. return ts.isLeftHandSideExpression(operand) ? operand : ts.setTextRange(factory.createParenthesizedExpression(operand), operand); } function parenthesizeOperandOfPrefixUnary(operand) { - // TODO(rbuckton): Verifiy whether `setTextRange` is needed. + // TODO(rbuckton): Verify whether `setTextRange` is needed. return ts.isUnaryExpression(operand) ? operand : ts.setTextRange(factory.createParenthesizedExpression(operand), operand); } function parenthesizeExpressionsOfCommaDelimitedList(elements) { @@ -21848,7 +21848,7 @@ var ts; var emittedExpression = ts.skipPartiallyEmittedExpressions(expression); var expressionPrecedence = ts.getExpressionPrecedence(emittedExpression); var commaPrecedence = ts.getOperatorPrecedence(221 /* SyntaxKind.BinaryExpression */, 27 /* SyntaxKind.CommaToken */); - // TODO(rbuckton): Verifiy whether `setTextRange` is needed. + // TODO(rbuckton): Verify whether `setTextRange` is needed. return expressionPrecedence > commaPrecedence ? expression : ts.setTextRange(factory.createParenthesizedExpression(expression), expression); } function parenthesizeExpressionOfExpressionStatement(expression) { @@ -21857,21 +21857,21 @@ var ts; var callee = emittedExpression.expression; var kind = ts.skipPartiallyEmittedExpressions(callee).kind; if (kind === 213 /* SyntaxKind.FunctionExpression */ || kind === 214 /* SyntaxKind.ArrowFunction */) { - // TODO(rbuckton): Verifiy whether `setTextRange` is needed. + // TODO(rbuckton): Verify whether `setTextRange` is needed. 
var updated = factory.updateCallExpression(emittedExpression, ts.setTextRange(factory.createParenthesizedExpression(callee), callee), emittedExpression.typeArguments, emittedExpression.arguments); return factory.restoreOuterExpressions(expression, updated, 8 /* OuterExpressionKinds.PartiallyEmittedExpressions */); } } var leftmostExpressionKind = ts.getLeftmostExpression(emittedExpression, /*stopAtCallExpressions*/ false).kind; if (leftmostExpressionKind === 205 /* SyntaxKind.ObjectLiteralExpression */ || leftmostExpressionKind === 213 /* SyntaxKind.FunctionExpression */) { - // TODO(rbuckton): Verifiy whether `setTextRange` is needed. + // TODO(rbuckton): Verify whether `setTextRange` is needed. return ts.setTextRange(factory.createParenthesizedExpression(expression), expression); } return expression; } function parenthesizeConciseBodyOfArrowFunction(body) { if (!ts.isBlock(body) && (ts.isCommaSequence(body) || ts.getLeftmostExpression(body, /*stopAtCallExpressions*/ false).kind === 205 /* SyntaxKind.ObjectLiteralExpression */)) { - // TODO(rbuckton): Verifiy whether `setTextRange` is needed. + // TODO(rbuckton): Verify whether `setTextRange` is needed. return ts.setTextRange(factory.createParenthesizedExpression(body), body); } return body; @@ -22232,7 +22232,7 @@ var ts; // Lazily load the parenthesizer, node converters, and some factory methods until they are used. var parenthesizerRules = ts.memoize(function () { return flags & 1 /* NodeFactoryFlags.NoParenthesizerRules */ ? ts.nullParenthesizerRules : ts.createParenthesizerRules(factory); }); var converters = ts.memoize(function () { return flags & 2 /* NodeFactoryFlags.NoNodeConverters */ ? ts.nullNodeConverters : ts.createNodeConverters(factory); }); - // lazy initializaton of common operator factories + // lazy initialization of common operator factories var getBinaryCreateFunction = ts.memoizeOne(function (operator) { return function (left, right) { return createBinaryExpression(left, operator, right); }; }); var getPrefixUnaryCreateFunction = ts.memoizeOne(function (operator) { return function (operand) { return createPrefixUnaryExpression(operator, operand); }; }); var getPostfixUnaryCreateFunction = ts.memoizeOne(function (operator) { return function (operand) { return createPostfixUnaryExpression(operand, operator); }; }); @@ -23980,7 +23980,7 @@ var ts; // @api function createArrayLiteralExpression(elements, multiLine) { var node = createBaseExpression(204 /* SyntaxKind.ArrayLiteralExpression */); - // Ensure we add a trailing comma for something like `[NumericLiteral(1), NumericLiteral(2), OmittedExpresion]` so that + // Ensure we add a trailing comma for something like `[NumericLiteral(1), NumericLiteral(2), OmittedExpression]` so that // we end up with `[1, 2, ,]` instead of `[1, 2, ]` otherwise the `OmittedExpression` will just end up being treated like // a trailing comma. var lastElement = elements && ts.lastOrUndefined(elements); @@ -32125,7 +32125,7 @@ var ts; var viableKeywordSuggestions = Object.keys(ts.textToKeywordObj).filter(function (keyword) { return keyword.length > 2; }); /** * Provides a better error message than the generic "';' expected" if possible for - * known common variants of a missing semicolon, such as from a mispelled names. + * known common variants of a missing semicolon, such as from a misspelled names. * * @param node Node preceding the expected semicolon location. 
*/ @@ -41082,7 +41082,7 @@ var ts; // Currently having element option declaration in the tsconfig with type "object" // determines if it needs onSetValidOptionKeyValueInParent callback or not // At moment there are only "compilerOptions", "typeAcquisition" and "typingOptions" - // that satifies it and need it to modify options set in them (for normalizing file paths) + // that satisfies it and need it to modify options set in them (for normalizing file paths) // vs what we set in the json // If need arises, we can modify this interface and callbacks as needed if (option) { @@ -41613,8 +41613,8 @@ var ts; var includeSpecs = _a.includeSpecs, excludeSpecs = _a.excludeSpecs; return ts.createCompilerDiagnostic(ts.Diagnostics.No_inputs_were_found_in_config_file_0_Specified_include_paths_were_1_and_exclude_paths_were_2, configFileName || "tsconfig.json", JSON.stringify(includeSpecs || []), JSON.stringify(excludeSpecs || [])); } - function shouldReportNoInputFiles(fileNames, canJsonReportNoInutFiles, resolutionStack) { - return fileNames.length === 0 && canJsonReportNoInutFiles && (!resolutionStack || resolutionStack.length === 0); + function shouldReportNoInputFiles(fileNames, canJsonReportNoInputFiles, resolutionStack) { + return fileNames.length === 0 && canJsonReportNoInputFiles && (!resolutionStack || resolutionStack.length === 0); } /*@internal*/ function canJsonReportNoInputFiles(raw) { @@ -41622,9 +41622,9 @@ var ts; } ts.canJsonReportNoInputFiles = canJsonReportNoInputFiles; /*@internal*/ - function updateErrorForNoInputFiles(fileNames, configFileName, configFileSpecs, configParseDiagnostics, canJsonReportNoInutFiles) { + function updateErrorForNoInputFiles(fileNames, configFileName, configFileSpecs, configParseDiagnostics, canJsonReportNoInputFiles) { var existingErrors = configParseDiagnostics.length; - if (shouldReportNoInputFiles(fileNames, canJsonReportNoInutFiles)) { + if (shouldReportNoInputFiles(fileNames, canJsonReportNoInputFiles)) { configParseDiagnostics.push(getErrorForNoInputFiles(configFileSpecs, configFileName)); } else { @@ -41984,7 +41984,7 @@ var ts; * @param basePath The base path for any relative file specifications. * @param options Compiler options. * @param host The host used to resolve files and directories. - * @param extraFileExtensions optionaly file extra file extension information from host + * @param extraFileExtensions optionally file extra file extension information from host */ /* @internal */ function getFileNamesFromConfigSpecs(configFileSpecs, basePath, options, host, extraFileExtensions) { @@ -42193,15 +42193,15 @@ var ts; if (match) { // We check this with a few `indexOf` calls because 3 `indexOf`/`lastIndexOf` calls is // less algorithmically complex (roughly O(3n) worst-case) than the regex we used to use, - // \/[^/]*?[*?][^/]*\/ which was polynominal in v8, since arbitrary sequences of wildcard + // \/[^/]*?[*?][^/]*\/ which was polynomial in v8, since arbitrary sequences of wildcard // characters could match any of the central patterns, resulting in bad backtracking. var questionWildcardIndex = spec.indexOf("?"); var starWildcardIndex = spec.indexOf("*"); - var lastDirectorySeperatorIndex = spec.lastIndexOf(ts.directorySeparator); + var lastDirectorySeparatorIndex = spec.lastIndexOf(ts.directorySeparator); return { key: useCaseSensitiveFileNames ? 
match[0] : ts.toFileNameLowerCase(match[0]), - flags: (questionWildcardIndex !== -1 && questionWildcardIndex < lastDirectorySeperatorIndex) - || (starWildcardIndex !== -1 && starWildcardIndex < lastDirectorySeperatorIndex) + flags: (questionWildcardIndex !== -1 && questionWildcardIndex < lastDirectorySeparatorIndex) + || (starWildcardIndex !== -1 && starWildcardIndex < lastDirectorySeparatorIndex) ? 1 /* WatchDirectoryFlags.Recursive */ : 0 /* WatchDirectoryFlags.None */ }; } @@ -43151,7 +43151,7 @@ var ts; * // it is rooted so it will be final candidate location * } * - * 'rootDirs' allows the project to be spreaded across multiple locations and resolve modules with relative names as if + * 'rootDirs' allows the project to be spread across multiple locations and resolve modules with relative names as if * they were in the same location. For example lets say there are two files * '/local/src/content/file1.ts' * '/shared/components/contracts/src/content/protocols/file2.ts' @@ -43464,7 +43464,7 @@ var ts; } } // esm mode relative imports shouldn't do any directory lookups (either inside `package.json` - // files or implicit `index.js`es). This is a notable depature from cjs norms, where `./foo/pkg` + // files or implicit `index.js`es). This is a notable departure from cjs norms, where `./foo/pkg` // could have been redirected by `./foo/pkg/package.json` to an arbitrary location! if (!(state.features & NodeResolutionFeatures.EsmMode)) { return loadNodeModuleFromDirectory(extensions, candidate, onlyRecordFailures, state, considerPackageJson); @@ -44236,7 +44236,7 @@ var ts; var candidateDir = candidateDirectories_1[_e]; if (ts.startsWith(finalPath, candidateDir)) { // The matched export is looking up something in either the out declaration or js dir, now map the written path back into the source dir and source extension - var pathFragment = finalPath.slice(candidateDir.length + 1); // +1 to also remove directory seperator + var pathFragment = finalPath.slice(candidateDir.length + 1); // +1 to also remove directory separator var possibleInputBase = ts.combinePaths(commonSourceDirGuess, pathFragment); var jsAndDtsExtensions = [".mjs" /* Extension.Mjs */, ".cjs" /* Extension.Cjs */, ".js" /* Extension.Js */, ".json" /* Extension.Json */, ".d.mts" /* Extension.Dmts */, ".d.cts" /* Extension.Dcts */, ".d.ts" /* Extension.Dts */]; for (var _f = 0, jsAndDtsExtensions_1 = jsAndDtsExtensions; _f < jsAndDtsExtensions_1.length; _f++) { @@ -44263,7 +44263,7 @@ var ts; return undefined; function getOutputDirectoriesForBaseDirectory(commonSourceDirGuess) { var _a, _b; - // Config file ouput paths are processed to be relative to the host's current directory, while + // Config file output paths are processed to be relative to the host's current directory, while // otherwise the paths are resolved relative to the common source dir the compiler puts together var currentDir = state.compilerOptions.configFile ? ((_b = (_a = state.host).getCurrentDirectory) === null || _b === void 0 ? void 0 : _b.call(_a)) || "" : commonSourceDirGuess; var candidateDirectories = []; @@ -44505,7 +44505,7 @@ var ts; ts.classicNameResolver = classicNameResolver; /** * A host may load a module from a global cache of typings. - * This is the minumum code needed to expose that functionality; the rest is in the host. + * This is the minimum code needed to expose that functionality; the rest is in the host. 
*/ /* @internal */ function loadModuleFromGlobalCache(moduleName, projectName, compilerOptions, host, globalCache, packageJsonInfoCache) { @@ -45730,7 +45730,7 @@ var ts; addAntecedent(currentReturnTarget, createReduceLabel(finallyLabel, returnLabel.antecedents, currentFlow)); } // If we have an outer exception target (i.e. a containing try-finally or try-catch-finally), add a - // control flow that goes back through the finally blok and back through each possible exception source. + // control flow that goes back through the finally block and back through each possible exception source. if (currentExceptionTarget && exceptionLabel.antecedents) { addAntecedent(currentExceptionTarget, createReduceLabel(finallyLabel, exceptionLabel.antecedents, currentFlow)); } @@ -47821,7 +47821,7 @@ var ts; } visitedTypes[type.id] = type; // Reuse visitSymbol to visit the type's symbol, - // but be sure to bail on recuring into the type if accept declines the symbol. + // but be sure to bail on recurring into the type if accept declines the symbol. var shouldBail = visitSymbol(type.symbol); if (shouldBail) return; @@ -48685,7 +48685,7 @@ var ts; var wildcardType = createIntrinsicType(1 /* TypeFlags.Any */, "any"); var errorType = createIntrinsicType(1 /* TypeFlags.Any */, "error"); var unresolvedType = createIntrinsicType(1 /* TypeFlags.Any */, "unresolved"); - var nonInferrableAnyType = createIntrinsicType(1 /* TypeFlags.Any */, "any", 65536 /* ObjectFlags.ContainsWideningType */); + var nonInferableAnyType = createIntrinsicType(1 /* TypeFlags.Any */, "any", 65536 /* ObjectFlags.ContainsWideningType */); var intrinsicMarkerType = createIntrinsicType(1 /* TypeFlags.Any */, "intrinsic"); var unknownType = createIntrinsicType(2 /* TypeFlags.Unknown */, "unknown"); var nonNullUnknownType = createIntrinsicType(2 /* TypeFlags.Unknown */, "unknown"); @@ -48715,7 +48715,7 @@ var ts; var voidType = createIntrinsicType(16384 /* TypeFlags.Void */, "void"); var neverType = createIntrinsicType(131072 /* TypeFlags.Never */, "never"); var silentNeverType = createIntrinsicType(131072 /* TypeFlags.Never */, "never"); - var nonInferrableType = createIntrinsicType(131072 /* TypeFlags.Never */, "never", 262144 /* ObjectFlags.NonInferrableType */); + var nonInferableType = createIntrinsicType(131072 /* TypeFlags.Never */, "never", 262144 /* ObjectFlags.NonInferableType */); var implicitNeverType = createIntrinsicType(131072 /* TypeFlags.Never */, "never"); var unreachableNeverType = createIntrinsicType(131072 /* TypeFlags.Never */, "never"); var nonPrimitiveType = createIntrinsicType(67108864 /* TypeFlags.NonPrimitive */, "object"); @@ -48740,7 +48740,7 @@ var ts; var anyFunctionType = createAnonymousType(undefined, emptySymbols, ts.emptyArray, ts.emptyArray, ts.emptyArray); // The anyFunctionType contains the anyFunctionType by definition. The flag is further propagated // in getPropagatingFlagsOfTypes, and it is checked in inferFromTypes. 
- anyFunctionType.objectFlags |= 262144 /* ObjectFlags.NonInferrableType */; + anyFunctionType.objectFlags |= 262144 /* ObjectFlags.NonInferableType */; var noConstraintType = createAnonymousType(undefined, emptySymbols, ts.emptyArray, ts.emptyArray, ts.emptyArray); var circularConstraintType = createAnonymousType(undefined, emptySymbols, ts.emptyArray, ts.emptyArray, ts.emptyArray); var resolvingDefaultType = createAnonymousType(undefined, emptySymbols, ts.emptyArray, ts.emptyArray, ts.emptyArray); @@ -54121,7 +54121,7 @@ var ts; } } function symbolTableToDeclarationStatements(symbolTable, context, bundled) { - var serializePropertySymbolForClass = makeSerializePropertySymbol(ts.factory.createPropertyDeclaration, 169 /* SyntaxKind.MethodDeclaration */, /*useAcessors*/ true); + var serializePropertySymbolForClass = makeSerializePropertySymbol(ts.factory.createPropertyDeclaration, 169 /* SyntaxKind.MethodDeclaration */, /*useAccessors*/ true); var serializePropertySymbolForInterfaceWorker = makeSerializePropertySymbol(function (_decorators, mods, name, question, type) { return ts.factory.createPropertySignature(mods, name, question, type); }, 168 /* SyntaxKind.MethodSignature */, /*useAcessors*/ false); // TODO: Use `setOriginalNode` on original declaration names where possible so these declarations see some kind of // declaration mapping @@ -54809,7 +54809,7 @@ var ts; var staticMembers = ts.flatMap(ts.filter(getPropertiesOfType(staticType), function (p) { return !(p.flags & 4194304 /* SymbolFlags.Prototype */) && p.escapedName !== "prototype" && !isNamespaceMember(p); }), function (p) { return serializePropertySymbolForClass(p, /*isStatic*/ true, staticBaseType); }); // When we encounter an `X.prototype.y` assignment in a JS file, we bind `X` as a class regardless as to whether // the value is ever initialized with a class or function-like value. For cases where `X` could never be - // created via `new`, we will inject a `private constructor()` declaration to indicate it is not createable. + // created via `new`, we will inject a `private constructor()` declaration to indicate it is not creatable. var isNonConstructableClassLikeInJsFile = !isClass && !!symbol.valueDeclaration && ts.isInJSFile(symbol.valueDeclaration) && @@ -56405,10 +56405,10 @@ var ts; reportImplicitAny(element, anyType); } // When we're including the pattern in the type (an indication we're obtaining a contextual type), we - // use the non-inferrable any type. Inference will never directly infer this type, but it is possible + // use the non-inferable any type. Inference will never directly infer this type, but it is possible // to infer a type that contains it, e.g. for a binding pattern like [foo] or { foo }. In such cases, - // widening of the binding pattern type substitutes a regular any for the non-inferrable any. - return includePatternInType ? nonInferrableAnyType : anyType; + // widening of the binding pattern type substitutes a regular any for the non-inferable any. + return includePatternInType ? nonInferableAnyType : anyType; } // Return the type implied by an object binding pattern function getTypeFromObjectBindingPattern(pattern, includePatternInType, reportErrors) { @@ -57276,7 +57276,7 @@ var ts; return isValidBaseType(constraint); } } - // TODO: Given that we allow type parmeters here now, is this `!isGenericMappedType(type)` check really needed? + // TODO: Given that we allow type parameters here now, is this `!isGenericMappedType(type)` check really needed? 
// There's no reason a `T` should be allowed while a `Readonly` should not. return !!(type.flags & (524288 /* TypeFlags.Object */ | 67108864 /* TypeFlags.NonPrimitive */ | 1 /* TypeFlags.Any */) && !isGenericMappedType(type) || type.flags & 2097152 /* TypeFlags.Intersection */ && ts.every(type.types, isValidBaseType)); @@ -58473,7 +58473,7 @@ var ts; && type.constraintType.type.indexType.flags & 262144 /* TypeFlags.TypeParameter */) { // A reverse mapping of `{[K in keyof T[K_1]]: T[K_1]}` is the same as that of `{[K in keyof T]: T}`, since all we care about is // inferring to the "type parameter" (or indexed access) shared by the constraint and template. So, to reduce the number of - // type identities produced, we simplify such indexed access occurences + // type identities produced, we simplify such indexed access occurrences var newTypeParam = type.constraintType.type.objectType; var newMappedType = replaceIndexedAccess(type.mappedType, type.constraintType.type, newTypeParam); inferredProp.mappedType = newMappedType; @@ -59715,7 +59715,7 @@ var ts; } else { // Parameter has no annotation - // By using a `DeferredType` symbol, we allow the type of this rest arg to be overriden by contextual type assignment so long as its type hasn't been + // By using a `DeferredType` symbol, we allow the type of this rest arg to be overridden by contextual type assignment so long as its type hasn't been // cached by `getTypeOfSymbol` yet. syntheticArgsSymbol.checkFlags |= 65536 /* CheckFlags.DeferredType */; syntheticArgsSymbol.deferralParent = neverType; @@ -60145,7 +60145,7 @@ var ts; } else { var type = getTypeFromTypeNode(constraintDeclaration); - if (type.flags & 1 /* TypeFlags.Any */ && !isErrorType(type)) { // Allow errorType to propegate to keep downstream errors suppressed + if (type.flags & 1 /* TypeFlags.Any */ && !isErrorType(type)) { // Allow errorType to propagate to keep downstream errors suppressed // use keyofConstraintType as the base constraint for mapped type key constraints (unknown isn;t assignable to that, but `any` was), // use unknown otherwise type = constraintDeclaration.parent.parent.kind === 195 /* SyntaxKind.MappedType */ ? keyofConstraintType : unknownType; @@ -62451,7 +62451,7 @@ var ts; propTypes.push(propType); } else if (!accessNode) { - // If there's no error node, we can immeditely stop, since error reporting is off + // If there's no error node, we can immediately stop, since error reporting is off return undefined; } else { @@ -62548,7 +62548,7 @@ var ts; var combinedMapper = void 0; if (root.inferTypeParameters) { // When we're looking at making an inference for an infer type, when we get its constraint, it'll automagically be - // instantiated with the context, so it doesn't need the mapper for the inference contex - however the constraint + // instantiated with the context, so it doesn't need the mapper for the inference context - however the constraint // may refer to another _root_, _uncloned_ `infer` type parameter [1], or to something mapped by `mapper` [2]. 
// [1] Eg, if we have `Foo` and `Foo` - `B` is constrained to `T`, which, in turn, has been instantiated // as `number` @@ -62576,7 +62576,7 @@ var ts; } } } - // We skip inference of the possible `infer` types unles the `extendsType` _is_ an infer type + // We skip inference of the possible `infer` types unless the `extendsType` _is_ an infer type // if it was, it's trivial to say that extendsType = checkType, however such a pattern is used to // "reset" the type being build up during constraint calculation and avoid making an apparently "infinite" constraint // so in those cases we refain from performing inference and retain the uninfered type parameter @@ -63634,7 +63634,7 @@ var ts; } if (instantiationDepth === 100 || instantiationCount >= 5000000) { // We have reached 100 recursive type instantiations, or 5M type instantiations caused by the same statement - // or expression. There is a very high likelyhood we're dealing with a combination of infinite generic types + // or expression. There is a very high likelihood we're dealing with a combination of infinite generic types // that perpetually generate new type identities, so we stop the recursion here by yielding the error type. ts.tracing === null || ts.tracing === void 0 ? void 0 : ts.tracing.instant("checkTypes" /* tracing.Phase.CheckTypes */, "instantiateType_DepthLimit", { typeId: type.id, instantiationDepth: instantiationDepth, instantiationCount: instantiationCount }); error(currentNode, ts.Diagnostics.Type_instantiation_is_excessively_deep_and_possibly_infinite); @@ -64008,7 +64008,7 @@ var ts; } /** * For every element returned from the iterator, checks that element to issue an error on a property of that element's type - * If that element would issue an error, we first attempt to dive into that element's inner expression and issue a more specific error by recuring into `elaborateError` + * If that element would issue an error, we first attempt to dive into that element's inner expression and issue a more specific error by recurring into `elaborateError` * Otherwise, we issue an error on _every_ element which fail the assignability check */ function elaborateElementwise(iterator, source, target, relation, containingMessageChain, errorOutputContainer) { @@ -64799,7 +64799,7 @@ var ts; case ts.Diagnostics.Call_signatures_with_no_arguments_have_incompatible_return_types_0_and_1.code: case ts.Diagnostics.Construct_signatures_with_no_arguments_have_incompatible_return_types_0_and_1.code: { if (path.length === 0) { - // Don't flatten signature compatability errors at the start of a chain - instead prefer + // Don't flatten signature compatibility errors at the start of a chain - instead prefer // to unify (the with no arguments bit is excessive for printback) and print them back var mappedMsg = msg; if (msg.code === ts.Diagnostics.Call_signatures_with_no_arguments_have_incompatible_return_types_0_and_1.code) { @@ -64846,10 +64846,10 @@ var ts; } for (var _i = 0, secondaryRootErrors_1 = secondaryRootErrors; _i < secondaryRootErrors_1.length; _i++) { var _b = secondaryRootErrors_1[_i], msg = _b[0], args = _b.slice(1); - var originalValue = msg.elidedInCompatabilityPyramid; - msg.elidedInCompatabilityPyramid = false; // Temporarily override elision to ensure error is reported + var originalValue = msg.elidedInCompatibilityPyramid; + msg.elidedInCompatibilityPyramid = false; // Temporarily override elision to ensure error is reported reportError.apply(void 0, __spreadArray([msg], args, false)); - msg.elidedInCompatabilityPyramid = 
originalValue; + msg.elidedInCompatibilityPyramid = originalValue; } if (info) { // Actually do the last relation error @@ -64860,7 +64860,7 @@ var ts; ts.Debug.assert(!!errorNode); if (incompatibleStack) reportIncompatibleStack(); - if (message.elidedInCompatabilityPyramid) + if (message.elidedInCompatibilityPyramid) return; errorInfo = ts.chainDiagnosticMessages(errorInfo, message, arg0, arg1, arg2, arg3); } @@ -65084,7 +65084,7 @@ var ts; // We suppress recursive intersection property checks because they can generate lots of work when relating // recursive intersections that are structurally similar but not exactly identical. See #37854. if (result_7 && !inPropertyCheck && (target.flags & 2097152 /* TypeFlags.Intersection */ && (isPerformingExcessPropertyChecks || isPerformingCommonPropertyChecks) || - isNonGenericObjectType(target) && !isArrayOrTupleType(target) && source.flags & 2097152 /* TypeFlags.Intersection */ && getApparentType(source).flags & 3670016 /* TypeFlags.StructuredType */ && !ts.some(source.types, function (t) { return !!(ts.getObjectFlags(t) & 262144 /* ObjectFlags.NonInferrableType */); }))) { + isNonGenericObjectType(target) && !isArrayOrTupleType(target) && source.flags & 2097152 /* TypeFlags.Intersection */ && getApparentType(source).flags & 3670016 /* TypeFlags.StructuredType */ && !ts.some(source.types, function (t) { return !!(ts.getObjectFlags(t) & 262144 /* ObjectFlags.NonInferableType */); }))) { inPropertyCheck = true; result_7 &= recursiveTypeRelatedTo(source, target, reportErrors, 4 /* IntersectionState.PropertyCheck */, recursionFlags); inPropertyCheck = false; @@ -65369,7 +65369,7 @@ var ts; return 0 /* Ternary.False */; } function getUndefinedStrippedTargetIfNeeded(source, target) { - // As a builtin type, `undefined` is a very low type ID - making it almsot always first, making this a very fast check to see + // As a builtin type, `undefined` is a very low type ID - making it almost always first, making this a very fast check to see // if we need to strip `undefined` from the target if (source.flags & 1048576 /* TypeFlags.Union */ && target.flags & 1048576 /* TypeFlags.Union */ && !(source.types[0].flags & 32768 /* TypeFlags.Undefined */) && target.types[0].flags & 32768 /* TypeFlags.Undefined */) { @@ -65427,7 +65427,7 @@ var ts; var related = -1 /* Ternary.True */; if (varianceFlags & 8 /* VarianceFlags.Unmeasurable */) { // Even an `Unmeasurable` variance works out without a structural check if the source and target are _identical_. - // We can't simply assume invariance, because `Unmeasurable` marks nonlinear relations, for example, a relation tained by + // We can't simply assume invariance, because `Unmeasurable` marks nonlinear relations, for example, a relation tainted by // the `-?` modifier in a mapped type (where, no matter how the inputs are related, the outputs still might not be) related = relation === identityRelation ? isRelatedTo(s, t, 3 /* RecursionFlags.Both */, /*reportErrors*/ false) : compareTypesIdentical(s, t); } @@ -65474,7 +65474,7 @@ var ts; return 0 /* Ternary.False */; } var keyIntersectionState = intersectionState | (inPropertyCheck ? 
8 /* IntersectionState.InPropertyCheck */ : 0); - var id = getRelationKey(source, target, keyIntersectionState, relation, /*ingnoreConstraints*/ false); + var id = getRelationKey(source, target, keyIntersectionState, relation, /*ignoreConstraints*/ false); var entry = relation.get(id); if (entry !== undefined) { if (reportErrors && entry & 2 /* RelationComparisonResult.Failed */ && !(entry & 4 /* RelationComparisonResult.Reported */)) { @@ -65908,7 +65908,7 @@ var ts; } } if (sourceFlags & 8650752 /* TypeFlags.TypeVariable */) { - // IndexedAccess comparisons are handled above in the `targetFlags & TypeFlage.IndexedAccess` branch + // IndexedAccess comparisons are handled above in the `targetFlags & TypeFlags.IndexedAccess` branch if (!(sourceFlags & 8388608 /* TypeFlags.IndexedAccess */ && targetFlags & 8388608 /* TypeFlags.IndexedAccess */)) { var constraint = getConstraintOfType(source); if (!constraint || (sourceFlags & 262144 /* TypeFlags.TypeParameter */ && constraint.flags & 1 /* TypeFlags.Any */)) { @@ -66901,7 +66901,7 @@ var ts; return findMatchingDiscriminantType(source, target, isRelatedTo, /*skipPartial*/ true) || findMatchingTypeReferenceOrTypeAliasReference(source, target) || findBestTypeForObjectLiteral(source, target) || - findBestTypeForInvokable(source, target) || + findBestTypeForInvocable(source, target) || findMostOverlappyType(source, target); } function discriminateTypeByDiscriminableItems(target, discriminators, related, defaultValue, skipPartial) { @@ -67830,7 +67830,7 @@ var ts; } } var result = createAnonymousType(type.symbol, members, ts.emptyArray, ts.emptyArray, ts.sameMap(getIndexInfosOfType(type), function (info) { return createIndexInfo(info.keyType, getWidenedType(info.type), info.isReadonly); })); - result.objectFlags |= (ts.getObjectFlags(type) & (4096 /* ObjectFlags.JSLiteral */ | 262144 /* ObjectFlags.NonInferrableType */)); // Retain js literal flag through widening + result.objectFlags |= (ts.getObjectFlags(type) & (4096 /* ObjectFlags.JSLiteral */ | 262144 /* ObjectFlags.NonInferableType */)); // Retain js literal flag through widening return result; } function getWidenedType(type) { @@ -68210,7 +68210,7 @@ var ts; // literal { a: 123, b: x => true } is marked non-inferable because it contains a context sensitive // arrow function, but is considered partially inferable because property 'a' has an inferable type. function isPartiallyInferableType(type) { - return !(ts.getObjectFlags(type) & 262144 /* ObjectFlags.NonInferrableType */) || + return !(ts.getObjectFlags(type) & 262144 /* ObjectFlags.NonInferableType */) || isObjectLiteralType(type) && ts.some(getPropertiesOfType(type), function (prop) { return isPartiallyInferableType(getTypeOfSymbol(prop)); }) || isTupleType(type) && ts.some(getTypeArguments(type), isPartiallyInferableType); } @@ -68321,7 +68321,7 @@ var ts; return !!(type.symbol && ts.some(type.symbol.declarations, hasSkipDirectInferenceFlag)); } function templateLiteralTypesDefinitelyUnrelated(source, target) { - // Two template literal types with diffences in their starting or ending text spans are definitely unrelated. + // Two template literal types with differences in their starting or ending text spans are definitely unrelated. 
var sourceStart = source.texts[0]; var targetStart = target.texts[0]; var sourceEnd = source.texts[source.texts.length - 1]; @@ -68345,7 +68345,7 @@ var ts; // * scanning proceeded without error // * a bigint can be scanned, and that when it is scanned, it is // * the full length of the input string (so the scanner is one character beyond the augmented input length) - // * it does not contain a numeric seperator (the `BigInt` constructor does not accept a numeric seperator in its input) + // * it does not contain a numeric separator (the `BigInt` constructor does not accept a numeric separator in its input) return success && result === 9 /* SyntaxKind.BigIntLiteral */ && scanner.getTextPos() === (s.length + 1) && !(flags & 512 /* TokenFlags.ContainsSeparator */); } function isValidTypeForTemplateLiteralPlaceholder(source, target) { @@ -68522,7 +68522,7 @@ var ts; // inferring from 'string[] & { extra: any }' to 'string[] & T' we want to remove string[] and // infer { extra: any } for T. But when inferring to 'string[] & Iterable' we want to keep the // string[] on the source side and infer string for T. - // Likewise, we consider a homomorphic mapped type constrainted to the target type parameter as similar to a "naked type variable" + // Likewise, we consider a homomorphic mapped type constrained to the target type parameter as similar to a "naked type variable" // in such scenarios. if (!(source.flags & 1048576 /* TypeFlags.Union */)) { // Infer between identically matching source and target constituents and remove the matching types. @@ -68545,12 +68545,12 @@ var ts; // not contain anyFunctionType when we come back to this argument for its second round // of inference. Also, we exclude inferences for silentNeverType (which is used as a wildcard // when constructing types from type parameters that had no inference candidates). - if (source === nonInferrableAnyType || source === silentNeverType || (priority & 128 /* InferencePriority.ReturnType */ && (source === autoType || source === autoArrayType)) || isFromInferenceBlockedSource(source)) { + if (source === nonInferableAnyType || source === silentNeverType || (priority & 128 /* InferencePriority.ReturnType */ && (source === autoType || source === autoArrayType)) || isFromInferenceBlockedSource(source)) { return; } var inference = getInferenceInfoForType(target); if (inference) { - if (ts.getObjectFlags(source) & 262144 /* ObjectFlags.NonInferrableType */) { + if (ts.getObjectFlags(source) & 262144 /* ObjectFlags.NonInferableType */) { return; } if (!inference.isFixed) { @@ -68872,10 +68872,10 @@ var ts; if (inference && !inference.isFixed && !isFromInferenceBlockedSource(source)) { var inferredType = inferTypeForHomomorphicMappedType(source, target, constraintType); if (inferredType) { - // We assign a lower priority to inferences made from types containing non-inferrable + // We assign a lower priority to inferences made from types containing non-inferable // types because we may only have a partial result (i.e. we may have failed to make // reverse inferences for some properties). - inferWithPriority(inferredType, inference.typeParameter, ts.getObjectFlags(source) & 262144 /* ObjectFlags.NonInferrableType */ ? + inferWithPriority(inferredType, inference.typeParameter, ts.getObjectFlags(source) & 262144 /* ObjectFlags.NonInferableType */ ? 
16 /* InferencePriority.PartialHomomorphicMappedType */ : 8 /* InferencePriority.HomomorphicMappedType */); } @@ -69043,7 +69043,7 @@ var ts; var sourceLen = sourceSignatures.length; var targetLen = targetSignatures.length; var len = sourceLen < targetLen ? sourceLen : targetLen; - var skipParameters = !!(ts.getObjectFlags(source) & 262144 /* ObjectFlags.NonInferrableType */); + var skipParameters = !!(ts.getObjectFlags(source) & 262144 /* ObjectFlags.NonInferableType */); for (var i = 0; i < len; i++) { inferFromSignature(getBaseSignature(sourceSignatures[sourceLen - len + i]), getErasedSignature(targetSignatures[targetLen - len + i]), skipParameters); } @@ -73121,7 +73121,7 @@ var ts; function getJsxManagedAttributesFromLocatedAttributes(context, ns, attributesType) { var managedSym = getJsxLibraryManagedAttributes(ns); if (managedSym) { - var declaredManagedType = getDeclaredTypeOfSymbol(managedSym); // fetches interface type, or initializes symbol links type parmaeters + var declaredManagedType = getDeclaredTypeOfSymbol(managedSym); // fetches interface type, or initializes symbol links type parameters var ctorType = getStaticTypeOfReferencedJsxConstructor(context); if (managedSym.flags & 524288 /* SymbolFlags.TypeAlias */) { var params = getSymbolLinks(managedSym).typeParameters; @@ -76514,7 +76514,7 @@ var ts; // returns a function type, we choose to defer processing. This narrowly permits function composition // operators to flow inferences through return types, but otherwise processes calls right away. We // use the resolvingSignature singleton to indicate that we deferred processing. This result will be - // propagated out and eventually turned into nonInferrableType (a type that is assignable to anything and + // propagated out and eventually turned into nonInferableType (a type that is assignable to anything and // from which we never make inferences). if (checkMode & 8 /* CheckMode.SkipGenericFunctions */ && !node.typeArguments && callSignatures.some(isGenericFunctionReturningFunction)) { skippedGenericFunction(node, checkMode); @@ -76712,14 +76712,14 @@ var ts; ts.Diagnostics.Not_all_constituents_of_type_0_are_constructable, typeToString(apparentType)); } if (hasSignatures) { - // Bail early if we already found a siganture, no chance of "No constituent of type is callable" + // Bail early if we already found a signature, no chance of "No constituent of type is callable" break; } } } if (!hasSignatures) { errorInfo = ts.chainDiagnosticMessages( - /* detials */ undefined, isCall ? + /* details */ undefined, isCall ? ts.Diagnostics.No_constituent_of_type_0_is_callable : ts.Diagnostics.No_constituent_of_type_0_is_constructable, typeToString(apparentType)); } @@ -77067,8 +77067,8 @@ var ts; var signature = getResolvedSignature(node, /*candidatesOutArray*/ undefined, checkMode); if (signature === resolvingSignature) { // CheckMode.SkipGenericFunctions is enabled and this is a call to a generic function that - // returns a function type. We defer checking and return nonInferrableType. - return nonInferrableType; + // returns a function type. We defer checking and return nonInferableType. 
+ return nonInferableType; } checkDeprecatedSignature(signature, node); if (node.expression.kind === 106 /* SyntaxKind.SuperKeyword */) { @@ -78158,7 +78158,7 @@ var ts; var returnType = getReturnTypeFromBody(node, checkMode); var returnOnlySignature = createSignature(undefined, undefined, undefined, ts.emptyArray, returnType, /*resolvedTypePredicate*/ undefined, 0, 0 /* SignatureFlags.None */); var returnOnlyType = createAnonymousType(node.symbol, emptySymbols, [returnOnlySignature], ts.emptyArray, ts.emptyArray); - returnOnlyType.objectFlags |= 262144 /* ObjectFlags.NonInferrableType */; + returnOnlyType.objectFlags |= 262144 /* ObjectFlags.NonInferableType */; return links.contextFreeType = returnOnlyType; } } @@ -78305,7 +78305,7 @@ var ts; function isAssignmentToReadonlyEntity(expr, symbol, assignmentKind) { var _a, _b; if (assignmentKind === 0 /* AssignmentKind.None */) { - // no assigment means it doesn't matter whether the entity is readonly + // no assignment means it doesn't matter whether the entity is readonly return false; } if (isReadonlySymbol(symbol)) { @@ -81245,7 +81245,7 @@ var ts; // SyntaxKind.ElementAccessExpression - `thing["aField"] = 42;` or `thing["aField"];` (with a doc comment on it) // or SyntaxKind.PropertyAccessExpression - `thing.aField = 42;` // all of which are pretty much always values, or at least imply a value meaning. - // It may be apprpriate to treat these as aliases in the future. + // It may be appropriate to treat these as aliases in the future. return 1 /* DeclarationSpaces.ExportValue */; default: return ts.Debug.failBadSyntaxKind(d); @@ -89196,7 +89196,7 @@ var ts; return ts.find(unionTarget.types, function (t) { return !isArrayLikeType(t); }); } } - function findBestTypeForInvokable(source, unionTarget) { + function findBestTypeForInvocable(source, unionTarget) { var signatureKind = 0 /* SignatureKind.Call */; var hasSignatures = getSignaturesOfType(source, signatureKind).length > 0 || (signatureKind = 1 /* SignatureKind.Construct */, getSignaturesOfType(source, signatureKind).length > 0); @@ -91739,8 +91739,8 @@ var ts; function visitElidableStatement(node) { var parsed = ts.getParseTreeNode(node); if (parsed !== node) { - // If the node has been transformed by a `before` transformer, perform no ellision on it - // As the type information we would attempt to lookup to perform ellision is potentially unavailable for the synthesized nodes + // If the node has been transformed by a `before` transformer, perform no elision on it + // As the type information we would attempt to lookup to perform elision is potentially unavailable for the synthesized nodes // We do not reuse `visitorWorker`, as the ellidable statement syntax kinds are technically unrecognized by the switch-case in `visitTypeScript`, // and will trigger debug failures when debug verbosity is turned up if (node.transformFlags & 1 /* TransformFlags.ContainsTypeScript */) { @@ -91760,7 +91760,7 @@ var ts; case 272 /* SyntaxKind.ExportDeclaration */: return visitExportDeclaration(node); default: - ts.Debug.fail("Unhandled ellided statement"); + ts.Debug.fail("Unhandled elided statement"); } } /** @@ -93648,7 +93648,7 @@ var ts; return true; } else { - // For an EnumDeclaration or ModuleDeclaration that merges with a preceeding + // For an EnumDeclaration or ModuleDeclaration that merges with a preceding // declaration we do not emit a leading variable declaration. 
To preserve the // begin/end semantics of the declararation and to properly handle exports // we wrap the leading variable declaration in a `MergeDeclarationMarker`. @@ -93886,7 +93886,7 @@ var ts; return undefined; } if (!node.exportClause || ts.isNamespaceExport(node.exportClause)) { - // never elide `export from ` declarations - + // never elide `export from ` declarations - // they should be kept for sideffects/untyped exports, even when the // type checker doesn't know about any exports return node; @@ -94203,7 +94203,7 @@ var ts; function trySubstituteNamespaceExportedName(node) { // If this is explicitly a local name, do not substitute. if (enabledSubstitutions & applicableSubstitutions && !ts.isGeneratedIdentifier(node) && !ts.isLocalName(node)) { - // If we are nested within a namespace declaration, we may need to qualifiy + // If we are nested within a namespace declaration, we may need to qualify // an identifier that is exported from a merged namespace. var container = resolver.getReferencedExportContainer(node, /*prefixLocals*/ false); if (container && container.kind !== 305 /* SyntaxKind.SourceFile */) { @@ -104915,7 +104915,7 @@ var ts; } } function createImportCallExpressionAMD(arg, containsLexicalThis) { - // improt("./blah") + // import("./blah") // emit as // define(["require", "exports", "blah"], function (require, exports) { // ... @@ -105355,7 +105355,7 @@ var ts; * @param node The node to visit. */ function visitMergeDeclarationMarker(node) { - // For an EnumDeclaration or ModuleDeclaration that merges with a preceeding + // For an EnumDeclaration or ModuleDeclaration that merges with a preceding // declaration we do not emit a leading variable declaration. To preserve the // begin/end semantics of the declararation and to properly handle exports // we wrapped the leading variable declaration in a `MergeDeclarationMarker`. @@ -106052,7 +106052,7 @@ var ts; // when resolving exports local exported entries/indirect exported entries in the module // should always win over entries with similar names that were added via star exports // to support this we store names of local/indirect exported entries in a set. - // this set is used to filter names brought by star expors. + // this set is used to filter names brought by star exports. // local names set should only be added if we have anything exported if (!moduleInfo.exportedNames && moduleInfo.exportSpecifiers.size === 0) { // no exported declarations (export var ...) or export specifiers (export {x}) @@ -106464,7 +106464,7 @@ var ts; * @param node The node to visit. */ function visitMergeDeclarationMarker(node) { - // For an EnumDeclaration or ModuleDeclaration that merges with a preceeding + // For an EnumDeclaration or ModuleDeclaration that merges with a preceding // declaration we do not emit a leading variable declaration. To preserve the // begin/end semantics of the declararation and to properly handle exports // we wrapped the leading variable declaration in a `MergeDeclarationMarker`. @@ -107461,7 +107461,7 @@ var ts; if (compilerOptions.module !== undefined && compilerOptions.module > ts.ModuleKind.ES2015) { return node; } - // Either ill-formed or don't need to be tranformed. + // Either ill-formed or don't need to be transformed. 
if (!node.exportClause || !ts.isNamespaceExport(node.exportClause) || !node.moduleSpecifier) { return node; } @@ -108718,7 +108718,7 @@ var ts; lateStatementReplacementMap.set(ts.getOriginalNodeId(i), result); } // And lastly, we need to get the final form of all those indetermine import declarations from before and add them to the output list - // (and remove them from the set to examine for outter declarations) + // (and remove them from the set to examine for outer declarations) return ts.visitNodes(statements, visitLateVisibilityMarkedStatements); function visitLateVisibilityMarkedStatements(statement) { if (ts.isLateVisibilityPaintedStatement(statement)) { @@ -115687,12 +115687,12 @@ var ts; for (var _i = 0; _i < arguments.length; _i++) { args[_i] = arguments[_i]; } - var triggerredInfo = "".concat(key === "watchFile" ? "FileWatcher" : "DirectoryWatcher", ":: Triggered with ").concat(args[0], " ").concat(args[1] !== undefined ? args[1] : "", ":: ").concat(getWatchInfo(file, flags, options, detailInfo1, detailInfo2, getDetailWatchInfo)); - log(triggerredInfo); + var triggeredInfo = "".concat(key === "watchFile" ? "FileWatcher" : "DirectoryWatcher", ":: Triggered with ").concat(args[0], " ").concat(args[1] !== undefined ? args[1] : "", ":: ").concat(getWatchInfo(file, flags, options, detailInfo1, detailInfo2, getDetailWatchInfo)); + log(triggeredInfo); var start = ts.timestamp(); cb.call.apply(cb, __spreadArray([/*thisArg*/ undefined], args, false)); var elapsed = ts.timestamp() - start; - log("Elapsed:: ".concat(elapsed, "ms ").concat(triggerredInfo)); + log("Elapsed:: ".concat(elapsed, "ms ").concat(triggeredInfo)); }, flags, options, detailInfo1, detailInfo2); }; } function getWatchInfo(file, flags, options, detailInfo1, detailInfo2, getDetailWatchInfo) { @@ -117198,7 +117198,7 @@ var ts; return newResolvedRef !== undefined; } }, function (oldProjectReferences, parent) { - // If array of references is changed, we cant resue old program + // If array of references is changed, we cant reuse old program var newReferences = parent ? getResolvedProjectReferenceByPath(parent.sourceFile.path).commandLine.projectReferences : projectReferences; return !ts.arrayIsEqualTo(oldProjectReferences, newReferences, ts.projectReferenceIsEqualTo); }); @@ -117230,7 +117230,7 @@ var ts; var newSourceFiles = []; var modifiedSourceFiles = []; structureIsReused = 2 /* StructureIsReused.Completely */; - // If the missing file paths are now present, it can change the progam structure, + // If the missing file paths are now present, it can change the program structure, // and hence cant reuse the structure. 
// This is same as how we dont reuse the structure if one of the file from old program is now missing if (oldProgram.getMissingFilePaths().some(function (missingFilePath) { return host.fileExists(missingFilePath); })) { @@ -117891,7 +117891,7 @@ var ts; return ts.createFileDiagnostic(sourceFile, start, nodes.end - start, message, arg0, arg1, arg2); } // Since these are syntactic diagnostics, parent might not have been set - // this means the sourceFile cannot be infered from the node + // this means the sourceFile cannot be inferred from the node function createDiagnosticForNode(node, message, arg0, arg1, arg2) { return ts.createDiagnosticForNodeInSourceFile(sourceFile, node, message, arg0, arg1, arg2); } @@ -119980,7 +119980,7 @@ var ts; return ts.some(sourceFile.moduleAugmentations, function (augmentation) { return ts.isGlobalScopeAugmentation(augmentation.parent); }); } /** - * Return true if the file will invalidate all files because it affectes global scope + * Return true if the file will invalidate all files because it affects global scope */ function isFileAffectingGlobalScope(sourceFile) { return containsGlobalScopeAugmentation(sourceFile) || @@ -123100,7 +123100,7 @@ var ts; WildcardDirectory: "Wild card directory", FailedLookupLocations: "Failed Lookup Locations", TypeRoots: "Type roots", - ConfigFileOfReferencedProject: "Config file of referened project", + ConfigFileOfReferencedProject: "Config file of referenced project", ExtendedConfigOfReferencedProject: "Extended config file of referenced project", WildcardDirectoryOfReferencedProject: "Wild card directory of referenced project", PackageJson: "package.json file", @@ -123563,7 +123563,7 @@ var ts; resolutionCache.updateTypeRootsWatch(); } if (missingFilePathsRequestedForRelease) { - // These are the paths that program creater told us as not in use any more but were missing on the disk. + // These are the paths that program creator told us as not in use any more but were missing on the disk. 
// We didnt remove the entry for them from sourceFiles cache so that we dont have to do File IO, // if there is already watcher for it (for missing files) // At this point our watches were updated, hence now we know that these paths are not tracked and need to be removed @@ -124102,7 +124102,7 @@ var ts; } ts.getBuildOrderFromAnyBuildOrder = getBuildOrderFromAnyBuildOrder; /** - * Create a function that reports watch status by writing to the system and handles the formating of the diagnostic + * Create a function that reports watch status by writing to the system and handles the formatting of the diagnostic */ function createBuilderStatusReporter(system, pretty) { return function (diagnostic) { @@ -125041,7 +125041,7 @@ var ts; continue; } // If the upstream project has only change .d.ts files, and we've built - // *after* those files, then we're "psuedo up to date" and eligible for a fast rebuild + // *after* those files, then we're "pseudo up to date" and eligible for a fast rebuild if (refStatus.newestDeclarationFileContentChangedTime && refStatus.newestDeclarationFileContentChangedTime <= oldestOutputFileTime) { pseudoUpToDate = true; upstreamChangedProject = ref.path; @@ -125431,7 +125431,7 @@ var ts; ts.clearMap(state.allWatchedExtendedConfigFiles, ts.closeFileWatcherOf); ts.clearMap(state.allWatchedWildcardDirectories, function (watchedWildcardDirectories) { return ts.clearMap(watchedWildcardDirectories, ts.closeFileWatcherOf); }); ts.clearMap(state.allWatchedInputFiles, function (watchedWildcardDirectories) { return ts.clearMap(watchedWildcardDirectories, ts.closeFileWatcher); }); - ts.clearMap(state.allWatchedPackageJsonFiles, function (watchedPacageJsonFiles) { return ts.clearMap(watchedPacageJsonFiles, ts.closeFileWatcher); }); + ts.clearMap(state.allWatchedPackageJsonFiles, function (watchedPackageJsonFiles) { return ts.clearMap(watchedPackageJsonFiles, ts.closeFileWatcher); }); } function createSolutionBuilderWorker(watch, hostOrHostWithWatch, rootNames, options, baseWatchOptions) { var state = createSolutionBuilderState(watch, hostOrHostWithWatch, rootNames, options, baseWatchOptions); @@ -127427,11 +127427,11 @@ var ts; // `EqualTo` causes the `middle` result to be returned // `GreaterThan` causes recursion on the left of the middle // `LessThan` causes recursion on the right of the middle - // Let's say you have 3 nodes, spanning positons + // Let's say you have 3 nodes, spanning positions // pos: 1, end: 3 // pos: 3, end: 3 // pos: 3, end: 5 - // and you're looking for the token at positon 3 - all 3 of these nodes are overlapping with position 3. + // and you're looking for the token at position 3 - all 3 of these nodes are overlapping with position 3. // In fact, there's a _good argument_ that node 2 shouldn't even be allowed to exist - depending on if // the start or end of the ranges are considered inclusive, it's either wholly subsumed by the first or the last node. // Unfortunately, such nodes do exist. 
:( - See fourslash/completionsImport_tsx.tsx - empty jsx attributes create @@ -129354,7 +129354,7 @@ var ts; var index = ts.binarySearchKey(sortedFileDiagnostics, span, ts.identity, ts.compareTextSpans); if (index >= 0) { var diagnostic = sortedFileDiagnostics[index]; - ts.Debug.assertEqual(diagnostic.file, node.getSourceFile(), "Diagnostics proided to 'findDiagnosticForNode' must be from a single SourceFile"); + ts.Debug.assertEqual(diagnostic.file, node.getSourceFile(), "Diagnostics provided to 'findDiagnosticForNode' must be from a single SourceFile"); return ts.cast(diagnostic, isDiagnosticWithLocation); } } @@ -131655,7 +131655,7 @@ var ts; /** * Check all of the declared modules and those in node modules. Possible sources of modules: * Modules that are found by the type checker - * Modules found relative to "baseUrl" compliler options (including patterns from "paths" compiler option) + * Modules found relative to "baseUrl" compiler options (including patterns from "paths" compiler option) * Modules from node_modules (i.e. those listed in package.json) * This includes all files that are found in node_modules/moduleName/ with acceptable file extensions */ @@ -133671,7 +133671,7 @@ var ts; symbols.push(exportedSymbol); } } - // If the module is merged with a value, we must get the type of the class and add its propertes (for inherited static methods). + // If the module is merged with a value, we must get the type of the class and add its properties (for inherited static methods). if (!isTypeLocation && symbol.declarations && symbol.declarations.some(function (d) { return d.kind !== 305 /* SyntaxKind.SourceFile */ && d.kind !== 261 /* SyntaxKind.ModuleDeclaration */ && d.kind !== 260 /* SyntaxKind.EnumDeclaration */; })) { @@ -133696,7 +133696,7 @@ var ts; } if (!isTypeLocation) { // GH#39946. Pulling on the type of a node inside of a function with a contextual `this` parameter can result in a circularity - // if the `node` is part of the exprssion of a `yield` or `return`. This circularity doesn't exist at compile time because + // if the `node` is part of the expression of a `yield` or `return`. This circularity doesn't exist at compile time because // we will check (and cache) the type of `this` *before* checking the type of the node. typeChecker.tryGetThisTypeAt(node, /*includeGlobalThis*/ false); var type = typeChecker.getTypeAtLocation(node).getNonOptionalType(); @@ -134725,9 +134725,9 @@ var ts; if (ancestorClassLike && contextToken === previousToken && isPreviousPropertyDeclarationTerminated(contextToken, position)) { return false; // Don't block completions. } - var ancestorPropertyDeclaraion = ts.getAncestor(contextToken.parent, 167 /* SyntaxKind.PropertyDeclaration */); + var ancestorPropertyDeclaration = ts.getAncestor(contextToken.parent, 167 /* SyntaxKind.PropertyDeclaration */); // If we are inside a class declaration and typing `constructor` after property declaration... - if (ancestorPropertyDeclaraion + if (ancestorPropertyDeclaration && contextToken !== previousToken && ts.isClassLike(previousToken.parent.parent) // And the cursor is at the token... 
@@ -134739,8 +134739,8 @@ var ts; else if (contextToken.kind !== 63 /* SyntaxKind.EqualsToken */ // Should not block: `class C { blah = c/**/ }` // But should block: `class C { blah = somewhat c/**/ }` and `class C { blah: SomeType c/**/ }` - && (ts.isInitializedProperty(ancestorPropertyDeclaraion) - || ts.hasType(ancestorPropertyDeclaraion))) { + && (ts.isInitializedProperty(ancestorPropertyDeclaration) + || ts.hasType(ancestorPropertyDeclaration))) { return true; } } @@ -134875,7 +134875,7 @@ var ts; /** * Filters out completion suggestions for class elements. * - * @returns Symbols to be suggested in an class element depending on existing memebers and symbol flags + * @returns Symbols to be suggested in an class element depending on existing members and symbol flags */ function filterClassMembersList(baseSymbols, existingMembers, currentClassElementModifierFlags) { var existingMemberNames = new ts.Set(); @@ -135478,7 +135478,7 @@ var ts; } /** * True if the first character of `lowercaseCharacters` is the first character - * of some "word" in `identiferString` (where the string is split into "words" + * of some "word" in `identifierString` (where the string is split into "words" * by camelCase and snake_case segments), then if the remaining characters of * `lowercaseCharacters` appear, in order, in the rest of `identifierString`. * @@ -143407,8 +143407,8 @@ var ts; // 1. Blocks are effectively redundant with SyntaxLists. // 2. TemplateSpans, along with the SyntaxLists containing them, are a somewhat unintuitive grouping // of things that should be considered independently. - // 3. A VariableStatement’s children are just a VaraiableDeclarationList and a semicolon. - // 4. A lone VariableDeclaration in a VaraibleDeclaration feels redundant with the VariableStatement. + // 3. A VariableStatement’s children are just a VariableDeclarationList and a semicolon. + // 4. A lone VariableDeclaration in a VariableDeclaration feels redundant with the VariableStatement. // Dive in without pushing a selection range. if (ts.isBlock(node) || ts.isTemplateSpan(node) || ts.isTemplateHead(node) || ts.isTemplateTail(node) @@ -148858,7 +148858,7 @@ var ts; for (var _i = 0, comments_2 = comments; _i < comments_2.length; _i++) { var comment = comments_2[_i]; // Single line can break the loop as trivia will only be this line. - // Comments on subsequest lines are also ignored. + // Comments on subsequent lines are also ignored. if (comment.kind === 2 /* SyntaxKind.SingleLineCommentTrivia */ || ts.getLineOfLocalPosition(sourceFile, comment.pos) > nodeEndLine) { break; } @@ -150675,7 +150675,7 @@ var ts; } var commaExpression = ts.findAncestor(token, function (node) { return ts.isExpressionStatement(node.parent) ? true : - isPossiblyPartOfCommaSeperatedInitializer(node) ? false : "quit"; + isPossiblyPartOfCommaSeparatedInitializer(node) ? false : "quit"; }); if (commaExpression) { var checker = program.getTypeChecker(); @@ -150708,7 +150708,7 @@ var ts; undefined; return !!identifier && !checker.getSymbolAtLocation(identifier); } - function isPossiblyPartOfCommaSeperatedInitializer(node) { + function isPossiblyPartOfCommaSeparatedInitializer(node) { switch (node.kind) { case 79 /* SyntaxKind.Identifier */: case 221 /* SyntaxKind.BinaryExpression */: @@ -152402,7 +152402,7 @@ var ts; ? 
ts.getSynthesizedDeepClonesWithReplacements(nodeOrNodes, /*includeTrivia*/ true, replaceNode) : ts.getSynthesizedDeepCloneWithReplacements(nodeOrNodes, /*includeTrivia*/ true, replaceNode); function replaceNode(original) { - // We are replacing `mod.SomeExport` wih `SomeExport`, so we only need to look at PropertyAccessExpressions + // We are replacing `mod.SomeExport` with `SomeExport`, so we only need to look at PropertyAccessExpressions if (original.kind === 206 /* SyntaxKind.PropertyAccessExpression */) { var replacement = useSitesToUnqualify.get(original); // Remove entry from `useSitesToUnqualify` so the refactor knows it's taken care of by the parent statement we're replacing @@ -152972,7 +152972,7 @@ var ts; break; case 3 /* ImportKind.CommonJS */: case 2 /* ImportKind.Namespace */: - ts.Debug.assert(entry.namespaceLikeImport === undefined || entry.namespaceLikeImport.name === symbolName, "Namespacelike import shoudl be missing or match symbolName"); + ts.Debug.assert(entry.namespaceLikeImport === undefined || entry.namespaceLikeImport.name === symbolName, "Namespacelike import should be missing or match symbolName"); entry.namespaceLikeImport = { importKind: importKind, name: symbolName, addAsTypeOnly: addAsTypeOnly }; break; } @@ -153205,9 +153205,9 @@ var ts; // import * as ns from "foo"; // import { member1, member2 } from "foo"; // - // member3/**/ <-- cusor here + // member3/**/ <-- cursor here // - // in this case we should provie 2 actions: + // in this case we should provide 2 actions: // 1. change "member3" to "ns.member3" // 2. add "member3" to the second import statement's import list // and it is up to the user to decide which one fits best. @@ -159853,7 +159853,7 @@ var ts; function getNewParametersForCombinedSignature(signatureDeclarations) { var lastSig = signatureDeclarations[signatureDeclarations.length - 1]; if (ts.isFunctionLikeDeclaration(lastSig) && lastSig.body) { - // Trim away implementation signature arguments (they should already be compatible with overloads, but are likely less precise to guarantee compatability with the overloads) + // Trim away implementation signature arguments (they should already be compatible with overloads, but are likely less precise to guarantee compatibility with the overloads) signatureDeclarations = signatureDeclarations.slice(0, signatureDeclarations.length - 1); } return ts.factory.createNodeArray([ @@ -162941,7 +162941,7 @@ var ts; continue; } } - /* We compare symbols because in some cases find all references wil return a reference that may or may not be to the refactored function. + /* We compare symbols because in some cases find all references will return a reference that may or may not be to the refactored function. 
Example from the refactorConvertParamsToDestructuredObject_methodCallUnion.ts test: class A { foo(a: number, b: number) { return a + b; } } class B { foo(c: number, d: number) { return c + d; } } @@ -165199,7 +165199,7 @@ var ts; return documentRegistry.updateDocumentWithKey(fileName, path, host, documentRegistryBucketKey, hostFileInformation.scriptSnapshot, hostFileInformation.version, hostFileInformation.scriptKind); } else { - // Release old source file and fall through to aquire new file with new script kind + // Release old source file and fall through to acquire new file with new script kind documentRegistry.releaseDocumentWithKey(oldSourceFile.resolvedPath, documentRegistry.getKeyForCompilationSettings(program.getCompilerOptions()), oldSourceFile.scriptKind); } } diff --git a/test/fixtures/test-runner/coverage-loader/hooks.mjs b/test/fixtures/test-runner/coverage-loader/hooks.mjs index c2e4b1dfc94628..57ee712ed97c73 100644 --- a/test/fixtures/test-runner/coverage-loader/hooks.mjs +++ b/test/fixtures/test-runner/coverage-loader/hooks.mjs @@ -1,5 +1,5 @@ const sources = { -// Virtual file. Dosen't exist on disk +// Virtual file. Doesn't exist on disk "virtual.js": ` import { test } from 'node:test'; test('test', async () => {}); diff --git a/test/fixtures/test-runner/output/skip_pattern.js b/test/fixtures/test-runner/output/skip_pattern.js index e70d6dc3736fa3..e09e59accc2dd7 100644 --- a/test/fixtures/test-runner/output/skip_pattern.js +++ b/test/fixtures/test-runner/output/skip_pattern.js @@ -17,4 +17,4 @@ it.skip('top level skipped it enabled', common.mustNotCall()); describe('top level describe', common.mustCall()); describe.skip('top level skipped describe disabled', common.mustNotCall()); describe.skip('top level skipped describe enabled', common.mustNotCall()); -test('this will NOt call', common.mustNotCall()); +test('this will NOT call', common.mustNotCall()); diff --git a/test/fixtures/wpt/FileAPI/BlobURL/test2-manual.html b/test/fixtures/wpt/FileAPI/BlobURL/test2-manual.html index 07fb27ef8af10b..f2a5a33d1d5ee9 100644 --- a/test/fixtures/wpt/FileAPI/BlobURL/test2-manual.html +++ b/test/fixtures/wpt/FileAPI/BlobURL/test2-manual.html @@ -51,7 +51,7 @@ xhr.send(); - }, 'Check whether the browser response 500 in XHR if the selected file which File/Blob URL refered is not found'); + }, 'Check whether the browser response 500 in XHR if the selected file which File/Blob URL referred is not found'); done(); diff --git a/test/fixtures/wpt/FileAPI/reading-data-section/Determining-Encoding.any.js b/test/fixtures/wpt/FileAPI/reading-data-section/Determining-Encoding.any.js index 5b69f7ed9821ac..059222636361d9 100644 --- a/test/fixtures/wpt/FileAPI/reading-data-section/Determining-Encoding.any.js +++ b/test/fixtures/wpt/FileAPI/reading-data-section/Determining-Encoding.any.js @@ -1,6 +1,6 @@ // META: title=FileAPI Test: Blob Determining Encoding -var t = async_test("Blob Determing Encoding with encoding argument"); +var t = async_test("Blob Determining Encoding with encoding argument"); t.step(function() { // string 'hello' var data = [0xFE,0xFF,0x00,0x68,0x00,0x65,0x00,0x6C,0x00,0x6C,0x00,0x6F]; @@ -14,7 +14,7 @@ t.step(function() { reader.readAsText(blob, "UTF-16BE"); }); -var t = async_test("Blob Determing Encoding with type attribute"); +var t = async_test("Blob Determining Encoding with type attribute"); t.step(function() { var data = [0xFE,0xFF,0x00,0x68,0x00,0x65,0x00,0x6C,0x00,0x6C,0x00,0x6F]; var blob = new Blob([new Uint8Array(data)], {type:"text/plain;charset=UTF-16BE"}); @@ 
-28,7 +28,7 @@ t.step(function() { }); -var t = async_test("Blob Determing Encoding with UTF-8 BOM"); +var t = async_test("Blob Determining Encoding with UTF-8 BOM"); t.step(function() { var data = [0xEF,0xBB,0xBF,0x68,0x65,0x6C,0x6C,0xC3,0xB6]; var blob = new Blob([new Uint8Array(data)]); @@ -41,7 +41,7 @@ t.step(function() { reader.readAsText(blob); }); -var t = async_test("Blob Determing Encoding without anything implying charset."); +var t = async_test("Blob Determining Encoding without anything implying charset."); t.step(function() { var data = [0x68,0x65,0x6C,0x6C,0xC3,0xB6]; var blob = new Blob([new Uint8Array(data)]); @@ -54,7 +54,7 @@ t.step(function() { reader.readAsText(blob); }); -var t = async_test("Blob Determing Encoding with UTF-16BE BOM"); +var t = async_test("Blob Determining Encoding with UTF-16BE BOM"); t.step(function() { var data = [0xFE,0xFF,0x00,0x68,0x00,0x65,0x00,0x6C,0x00,0x6C,0x00,0x6F]; var blob = new Blob([new Uint8Array(data)]); @@ -67,7 +67,7 @@ t.step(function() { reader.readAsText(blob); }); -var t = async_test("Blob Determing Encoding with UTF-16LE BOM"); +var t = async_test("Blob Determining Encoding with UTF-16LE BOM"); t.step(function() { var data = [0xFF,0xFE,0x68,0x00,0x65,0x00,0x6C,0x00,0x6C,0x00,0x6F,0x00]; var blob = new Blob([new Uint8Array(data)]); diff --git a/test/fixtures/wpt/FileAPI/support/send-file-form-helper.js b/test/fixtures/wpt/FileAPI/support/send-file-form-helper.js index d6adf21ec33795..39c73c41b42207 100644 --- a/test/fixtures/wpt/FileAPI/support/send-file-form-helper.js +++ b/test/fixtures/wpt/FileAPI/support/send-file-form-helper.js @@ -180,7 +180,7 @@ const formPostFileUploadTest = ({ // Used to verify that the browser agrees with the test about // field value replacement and encoding independently of file system - // idiosyncracies. + // idiosyncrasies. form.append(Object.assign(document.createElement('input'), { type: 'hidden', name: 'filename', diff --git a/test/fixtures/wpt/FileAPI/support/send-file-formdata-helper.js b/test/fixtures/wpt/FileAPI/support/send-file-formdata-helper.js index 53c8cca7e09b8e..dd62a0e98e92c8 100644 --- a/test/fixtures/wpt/FileAPI/support/send-file-formdata-helper.js +++ b/test/fixtures/wpt/FileAPI/support/send-file-formdata-helper.js @@ -34,7 +34,7 @@ const formDataPostFileUploadTest = ({ // Used to verify that the browser agrees with the test about // field value replacement and encoding independently of file system - // idiosyncracies. + // idiosyncrasies. 
formData.append("filename", fileBaseName); // Same, but with name and value reversed to ensure field names diff --git a/test/fixtures/wpt/WebCryptoAPI/sign_verify/eddsa_small_order_points.js b/test/fixtures/wpt/WebCryptoAPI/sign_verify/eddsa_small_order_points.js index ae973155992c87..883c3021926fef 100644 --- a/test/fixtures/wpt/WebCryptoAPI/sign_verify/eddsa_small_order_points.js +++ b/test/fixtures/wpt/WebCryptoAPI/sign_verify/eddsa_small_order_points.js @@ -15,7 +15,7 @@ function run_test() { isVerified = await subtle.verify(algorithm, publicKey, test.signature, test.message); } catch (err) { assert_true(publicKey !== undefined, "Public key should be valid."); - assert_unreached("The operation shouldn't fail, but it thown this error: " + err.name + ": " + err.message + "."); + assert_unreached("The operation shouldn't fail, but it threw this error: " + err.name + ": " + err.message + "."); } assert_equals(isVerified, test.verified, "Signature verification result."); }, algorithmName + " Verification checks with small-order key of order - Test " + test.id); diff --git a/test/fixtures/wpt/common/security-features/README.md b/test/fixtures/wpt/common/security-features/README.md index f957541f75ecc3..a5bd45fe4571f3 100644 --- a/test/fixtures/wpt/common/security-features/README.md +++ b/test/fixtures/wpt/common/security-features/README.md @@ -280,7 +280,7 @@ Taking the spec JSON, the generator follows this algorithm: * Expand all ```excluded_tests``` to create a denylist of selections -* For each `specification` entries: Expand the ```test_expansion``` pattern into selections and check each against the denylist, if not marked as suppresed, generate the test resources for the selection +* For each `specification` entries: Expand the ```test_expansion``` pattern into selections and check each against the denylist, if not marked as suppressed, generate the test resources for the selection ### SourceContext Resolution diff --git a/test/fixtures/wpt/compression/decompression-split-chunk.tentative.any.js b/test/fixtures/wpt/compression/decompression-split-chunk.tentative.any.js index eb12c2a2360cd9..604092035e7d55 100644 --- a/test/fixtures/wpt/compression/decompression-split-chunk.tentative.any.js +++ b/test/fixtures/wpt/compression/decompression-split-chunk.tentative.any.js @@ -39,15 +39,15 @@ for (let chunkSize = 1; chunkSize < 16; ++chunkSize) { promise_test(async t => { const decompressedData = await decompressArrayBuffer(compressedBytesWithDeflate, 'deflate', chunkSize); assert_array_equals(decompressedData, expectedChunkValue, "value should match"); - }, `decompressing splitted chunk into pieces of size ${chunkSize} should work in deflate`); + }, `decompressing split chunk into pieces of size ${chunkSize} should work in deflate`); promise_test(async t => { const decompressedData = await decompressArrayBuffer(compressedBytesWithGzip, 'gzip', chunkSize); assert_array_equals(decompressedData, expectedChunkValue, "value should match"); - }, `decompressing splitted chunk into pieces of size ${chunkSize} should work in gzip`); + }, `decompressing split chunk into pieces of size ${chunkSize} should work in gzip`); promise_test(async t => { const decompressedData = await decompressArrayBuffer(compressedBytesWithDeflateRaw, 'deflate-raw', chunkSize); assert_array_equals(decompressedData, expectedChunkValue, "value should match"); - }, `decompressing splitted chunk into pieces of size ${chunkSize} should work in deflate-raw`); + }, `decompressing split chunk into pieces of size ${chunkSize} should 
work in deflate-raw`); } diff --git a/test/fixtures/wpt/dom/events/scrolling/overscroll-event-fired-to-window.html b/test/fixtures/wpt/dom/events/scrolling/overscroll-event-fired-to-window.html index ef5ae3daef8158..348dadcb4c50b7 100644 --- a/test/fixtures/wpt/dom/events/scrolling/overscroll-event-fired-to-window.html +++ b/test/fixtures/wpt/dom/events/scrolling/overscroll-event-fired-to-window.html @@ -31,7 +31,7 @@ function onOverscroll(event) { assert_false(event.cancelable); - // overscroll events targetting document are bubbled to the window. + // overscroll events targeting document are bubbled to the window. assert_true(event.bubbles); window_received_overscroll = true; } diff --git a/test/fixtures/wpt/dom/events/scrolling/scroll_support.js b/test/fixtures/wpt/dom/events/scrolling/scroll_support.js index a708364df07cad..3d7709772baafb 100644 --- a/test/fixtures/wpt/dom/events/scrolling/scroll_support.js +++ b/test/fixtures/wpt/dom/events/scrolling/scroll_support.js @@ -88,7 +88,7 @@ const MAX_UNCHANGED_FRAMES = 20; function waitFor(condition, error_message = 'Reaches the maximum frames.') { return new Promise((resolve, reject) => { function tick(frames) { - // We requestAnimationFrame either for MAX_FRAM frames or until condition + // We requestAnimationFrame either for MAX_FRAME frames or until condition // is met. if (frames >= MAX_FRAME) reject(error_message); diff --git a/test/fixtures/wpt/resource-timing/cached-image-gets-single-entry.html b/test/fixtures/wpt/resource-timing/cached-image-gets-single-entry.html index 2d8c4e2e83944d..bf71615f7b15c5 100644 --- a/test/fixtures/wpt/resource-timing/cached-image-gets-single-entry.html +++ b/test/fixtures/wpt/resource-timing/cached-image-gets-single-entry.html @@ -14,7 +14,7 @@

Description

Test that a reused resource only appears in the buffer once.

diff --git a/test/fixtures/wpt/user-timing/measure-exceptions.html b/test/fixtures/wpt/user-timing/measure-exceptions.html index 2836eaee2a86c1..e89d2685891835 100644 --- a/test/fixtures/wpt/user-timing/measure-exceptions.html +++ b/test/fixtures/wpt/user-timing/measure-exceptions.html @@ -35,7 +35,7 @@ const args = [ 51.15, // Verify that number is parsed as string, not number. - "DoesNotExist", // Non-existant mark name should cause error. + "DoesNotExist", // Non-existent mark name should cause error. ]; args.forEach(each => { test(()=>{ diff --git a/test/fixtures/wpt/user-timing/measure-l3.any.js b/test/fixtures/wpt/user-timing/measure-l3.any.js index 642b55ab635e2c..de27af07198170 100644 --- a/test/fixtures/wpt/user-timing/measure-l3.any.js +++ b/test/fixtures/wpt/user-timing/measure-l3.any.js @@ -32,4 +32,4 @@ test(function() { const measureEntry = performance.measure("A", "mark", "mark"); assert_equals(endTime(measureEntry), markEntry.startTime); assert_equals(measureEntry.startTime, markEntry.startTime); -}, "When start and end mark are both given, the start time and end time of the measure entry should be the the marks' time, repectively"); +}, "When start and end mark are both given, the start time and end time of the measure entry should be the the marks' time, respectively"); diff --git a/test/fixtures/wpt/webmessaging/broadcastchannel/workers.html b/test/fixtures/wpt/webmessaging/broadcastchannel/workers.html index 8b55492f3cffb3..62fa31f623c4b8 100644 --- a/test/fixtures/wpt/webmessaging/broadcastchannel/workers.html +++ b/test/fixtures/wpt/webmessaging/broadcastchannel/workers.html @@ -256,7 +256,7 @@ // Since the worker has closed, it's not expected that the // BroadcastChannel will receive messages (there's a separate test for // that), so just indicate directly that it's ready to test receiving - // a message from the parent dispite the possibility of a race condition. + // a message from the parent despite the possibility of a race condition. postMessage('ready'); } }); diff --git a/test/fixtures/wpt/webstorage/localstorage-about-blank-3P-iframe-opens-3P-window.partitioned.tentative.html b/test/fixtures/wpt/webstorage/localstorage-about-blank-3P-iframe-opens-3P-window.partitioned.tentative.html index de94fb2cf435de..a474a8c0de3746 100644 --- a/test/fixtures/wpt/webstorage/localstorage-about-blank-3P-iframe-opens-3P-window.partitioned.tentative.html +++ b/test/fixtures/wpt/webstorage/localstorage-about-blank-3P-iframe-opens-3P-window.partitioned.tentative.html @@ -57,7 +57,7 @@ for (let id in ids) { assert_true(id !== undefined, "id is not undefined"); } - // Note: we use assert_true, rather than assert_equals becuase we're + // Note: we use assert_true, rather than assert_equals because we're // setting random numbers as IDs - this would mean expectations // files wouldn't work as intended. 
assert_true(crossSiteIframeAboutBlankID !== crossSiteIframeID, diff --git a/test/fixtures/wpt/webstorage/storage_getitem.window.js b/test/fixtures/wpt/webstorage/storage_getitem.window.js index 8a5896836d2aeb..bd804946223e6a 100644 --- a/test/fixtures/wpt/webstorage/storage_getitem.window.js +++ b/test/fixtures/wpt/webstorage/storage_getitem.window.js @@ -30,5 +30,5 @@ assert_equals(storage.getItem(null), "bar", "storage.getItem(null)") assert_equals(storage.getItem(""), "baz", "storage.getItem('')") }, name + ".getItem should be correct") - }, "Get value by getIten(key) and named access in " + name + "."); + }, "Get value by getItem(key) and named access in " + name + "."); }); diff --git a/test/fixtures/wpt/webstorage/storage_length.window.js b/test/fixtures/wpt/webstorage/storage_length.window.js index 9648e48c8d881b..98566ec79ac628 100644 --- a/test/fixtures/wpt/webstorage/storage_length.window.js +++ b/test/fixtures/wpt/webstorage/storage_length.window.js @@ -19,5 +19,5 @@ storage.setItem("age", "20"); assert_equals(storage.length, 2, "storage.length") - }, name + ".length (proprty access)"); + }, name + ".length (property access)"); }); diff --git a/test/internet/test-uv-threadpool-schedule.js b/test/internet/test-uv-threadpool-schedule.js index 9e61d875f20557..33ddf7fa3f050e 100644 --- a/test/internet/test-uv-threadpool-schedule.js +++ b/test/internet/test-uv-threadpool-schedule.js @@ -1,6 +1,6 @@ 'use strict'; -// Test to validate massive dns lookups do not block filesytem I/O +// Test to validate massive dns lookups do not block filesystem I/O // (or any fast I/O). Prior to https://github.com/libuv/libuv/pull/1845 // few back-to-back dns lookups were sufficient to engage libuv // threadpool workers in a blocking manner, throttling other work items diff --git a/test/js-native-api/test_function/test_function.c b/test/js-native-api/test_function/test_function.c index 02a2988dc3e265..2b53df68d4fc18 100644 --- a/test/js-native-api/test_function/test_function.c +++ b/test/js-native-api/test_function/test_function.c @@ -146,7 +146,7 @@ static napi_value TestBadReturnExceptionPending(napi_env env, napi_callback_info // exception occurs, but we have seen that the C++ wrapper // with exceptions enabled sometimes returns an invalid value // when an exception is thrown. Test that we ignore the return - // value then an exeption is pending. We use 0xFFFFFFFF as a value + // value then an exception is pending. We use 0xFFFFFFFF as a value // that should never be a valid napi_value and node seems to // crash if it is not ignored indicating that it is indeed invalid. return (napi_value)(0xFFFFFFFFF); diff --git a/test/js-native-api/test_object/test_object.c b/test/js-native-api/test_object/test_object.c index b9b3e5c90ae906..6d74128118f997 100644 --- a/test/js-native-api/test_object/test_object.c +++ b/test/js-native-api/test_object/test_object.c @@ -656,7 +656,7 @@ TypeTaggedInstance(napi_env env, napi_callback_info info) { return instance; } -// V8 will not allowe us to construct an external with a NULL data value. +// V8 will not allow us to construct an external with a NULL data value. 
#define IN_LIEU_OF_NULL ((void*)0x1) static napi_value PlainExternal(napi_env env, napi_callback_info info) { diff --git a/test/js-native-api/test_reference/test.js b/test/js-native-api/test_reference/test.js index 34a1ac70ede24a..6ef84009ea0333 100644 --- a/test/js-native-api/test_reference/test.js +++ b/test/js-native-api/test_reference/test.js @@ -142,7 +142,7 @@ runTests(); // reference (there is a finalizer behind the scenes even // though it cannot be passed to napi_create_reference). // -// Since the order is not guarranteed, run the +// Since the order is not guaranteed, run the // test a number of times maximize the chance that we // get a run with the desired order for the test. // diff --git a/test/js-native-api/test_string/test_string.c b/test/js-native-api/test_string/test_string.c index 01e5dbee3912d8..0dbe1806d7b9c6 100644 --- a/test/js-native-api/test_string/test_string.c +++ b/test/js-native-api/test_string/test_string.c @@ -20,7 +20,7 @@ static napi_status validate_and_retrieve_single_string_arg( NODE_API_ASSERT_STATUS(env, valuetype == napi_string, - "Wrong type of argment. Expects a string."); + "Wrong type of argument. Expects a string."); return napi_ok; } diff --git a/test/node-api/test_init_order/test_init_order.cc b/test/node-api/test_init_order/test_init_order.cc index fc4174c09d8dba..6d76c3d4c0ba61 100644 --- a/test/node-api/test_init_order/test_init_order.cc +++ b/test/node-api/test_init_order/test_init_order.cc @@ -12,7 +12,7 @@ namespace { // In production code developers must avoid dynamic static initializers because // they affect the start up time. They must prefer static initialization such as // use of constexpr functions or classes with constexpr constructors. E.g. -// instead of using std::string, it is preferrable to use const char[], or +// instead of using std::string, it is preferable to use const char[], or // constexpr std::string_view starting with C++17, or even constexpr // std::string starting with C++20. struct MyClass { diff --git a/test/node-api/test_instance_data/test_instance_data.c b/test/node-api/test_instance_data/test_instance_data.c index ef79e3c52b778b..3ae4836ceba133 100644 --- a/test/node-api/test_instance_data/test_instance_data.c +++ b/test/node-api/test_instance_data/test_instance_data.c @@ -139,7 +139,7 @@ static void FinalizeThreadsafeFunction(napi_env env, void* raw, void* hint) { data->tsfn = NULL; } -// Ths function accepts two arguments: the JS callback, and the finalize +// This function accepts two arguments: the JS callback, and the finalize // callback. The latter moves the test forward. static napi_value TestThreadsafeFunction(napi_env env, napi_callback_info info) { diff --git a/test/parallel/parallel.status b/test/parallel/parallel.status index 43db3e8926c890..2f94923799bdea 100644 --- a/test/parallel/parallel.status +++ b/test/parallel/parallel.status @@ -18,7 +18,7 @@ test-fs-read-stream-concurrent-reads: PASS, FLAKY # Until V8 provides a better way to check for flag mismatch without # making the code cache/snapshot unreproducible, disable the test -# for a preemptive check now. It should idealy fail more gracefully +# for a preemptive check now. It should ideally fail more gracefully # with a better checking mechanism. 
# https://github.com/nodejs/build/issues/3043 test-snapshot-incompatible: SKIP diff --git a/test/parallel/test-blob-file-backed.js b/test/parallel/test-blob-file-backed.js index 2e143e6936f748..6e919d2982f78d 100644 --- a/test/parallel/test-blob-file-backed.js +++ b/test/parallel/test-blob-file-backed.js @@ -134,7 +134,7 @@ writeFileSync(testfile5, ''); })().then(common.mustCall()); (async () => { - // We currently do not allow File-backed blobs to be cloned or transfered + // We currently do not allow File-backed blobs to be cloned or transferred // across worker threads. This is largely because the underlying FdEntry // is bound to the Environment/Realm under which is was created. const blob = await openAsBlob(__filename); diff --git a/test/parallel/test-cli-options-negation.js b/test/parallel/test-cli-options-negation.js index bfbee635ab1a2e..a5ac456d256834 100644 --- a/test/parallel/test-cli-options-negation.js +++ b/test/parallel/test-cli-options-negation.js @@ -21,7 +21,7 @@ assert(spawnWithFlags(['--no-max-http-header-size']).stderr.toString().includes( 'a boolean option', )); -// Inexistant flags cannot be negated. +// Inexistent flags cannot be negated. assert(spawnWithFlags(['--no-i-dont-exist']).stderr.toString().includes( 'bad option: --no-i-dont-exist', )); diff --git a/test/parallel/test-compile-cache-api-tmpdir.js b/test/parallel/test-compile-cache-api-tmpdir.js index 3014be594b9bda..e676893b01f521 100644 --- a/test/parallel/test-compile-cache-api-tmpdir.js +++ b/test/parallel/test-compile-cache-api-tmpdir.js @@ -1,7 +1,7 @@ 'use strict'; // This tests module.enableCompileCache() and module.getCompileCacheDir() work with -// the TMPDIR environemnt variable override. +// the TMPDIR environment variable override. require('../common'); const { spawnSyncAndAssert } = require('../common/child_process'); diff --git a/test/parallel/test-crypto-x509.js b/test/parallel/test-crypto-x509.js index 28fbf308e2d895..15e1f53bb05faf 100644 --- a/test/parallel/test-crypto-x509.js +++ b/test/parallel/test-crypto-x509.js @@ -366,7 +366,7 @@ UcXd/5qu2GhokrKU2cPttU+XAN2Om6a0 // Test date parsing of `validFromDate` and `validToDate` fields, according to RFC 5280. // Validity dates up until the year 2049 are encoded as UTCTime. - // The fomatting of UTCTime changes from the year ~1949 to 1950~. + // The formatting of UTCTime changes from the year ~1949 to 1950~. const certPemUTCTime = `-----BEGIN CERTIFICATE----- MIIE/TCCAuWgAwIBAgIUHbXPaFnjeBehMvdHkXZ+E3a78QswDQYJKoZIhvcNAQEL BQAwDTELMAkGA1UEBhMCS1IwIBgPMTk0OTEyMjUyMzU5NThaFw01MDAxMDEyMzU5 diff --git a/test/parallel/test-debugger-set-context-line-number.mjs b/test/parallel/test-debugger-set-context-line-number.mjs index 5c6e281c1d3b4a..d2351651bdb924 100644 --- a/test/parallel/test-debugger-set-context-line-number.mjs +++ b/test/parallel/test-debugger-set-context-line-number.mjs @@ -15,8 +15,8 @@ function onFatal(error) { } function getLastLine(output) { - const splittedByLine = output.split(';'); - return splittedByLine[splittedByLine.length - 2]; + const splitByLine = output.split(';'); + return splitByLine[splitByLine.length - 2]; } // Stepping through breakpoints. diff --git a/test/parallel/test-fs-readv-promises.js b/test/parallel/test-fs-readv-promises.js index cdfc3d3b21f918..f352fc44d2b5b8 100644 --- a/test/parallel/test-fs-readv-promises.js +++ b/test/parallel/test-fs-readv-promises.js @@ -7,7 +7,7 @@ const tmpdir = require('../common/tmpdir'); tmpdir.refresh(); const expected = 'ümlaut. 
Лорем 運務ホソモ指及 आपको करने विकास 紙読決多密所 أضف'; -const exptectedBuff = Buffer.from(expected); +const expectedBuff = Buffer.from(expected); let cnt = 0; function getFileName() { @@ -16,7 +16,7 @@ function getFileName() { const allocateEmptyBuffers = (combinedLength) => { const bufferArr = []; - // Allocate two buffers, each half the size of exptectedBuff + // Allocate two buffers, each half the size of expectedBuff bufferArr[0] = Buffer.alloc(Math.floor(combinedLength / 2)); bufferArr[1] = Buffer.alloc(combinedLength - bufferArr[0].length); @@ -26,10 +26,10 @@ const allocateEmptyBuffers = (combinedLength) => { (async () => { { const filename = getFileName(); - await fs.writeFile(filename, exptectedBuff); + await fs.writeFile(filename, expectedBuff); const handle = await fs.open(filename, 'r'); - const bufferArr = allocateEmptyBuffers(exptectedBuff.length); - const expectedLength = exptectedBuff.length; + const bufferArr = allocateEmptyBuffers(expectedBuff.length); + const expectedLength = expectedBuff.length; let { bytesRead, buffers } = await handle.readv([Buffer.from('')], null); @@ -45,10 +45,10 @@ const allocateEmptyBuffers = (combinedLength) => { { const filename = getFileName(); - await fs.writeFile(filename, exptectedBuff); + await fs.writeFile(filename, expectedBuff); const handle = await fs.open(filename, 'r'); - const bufferArr = allocateEmptyBuffers(exptectedBuff.length); - const expectedLength = exptectedBuff.length; + const bufferArr = allocateEmptyBuffers(expectedBuff.length); + const expectedLength = expectedBuff.length; let { bytesRead, buffers } = await handle.readv([Buffer.from('')]); assert.strictEqual(bytesRead, 0); diff --git a/test/parallel/test-fs-readv-sync.js b/test/parallel/test-fs-readv-sync.js index 548f54cbb9e3b2..4f20d9b54cfbc4 100644 --- a/test/parallel/test-fs-readv-sync.js +++ b/test/parallel/test-fs-readv-sync.js @@ -9,15 +9,15 @@ tmpdir.refresh(); const expected = 'ümlaut. Лорем 運務ホソモ指及 आपको करने विकास 紙読決多密所 أضف'; -const exptectedBuff = Buffer.from(expected); -const expectedLength = exptectedBuff.length; +const expectedBuff = Buffer.from(expected); +const expectedLength = expectedBuff.length; const filename = tmpdir.resolve('readv_sync.txt'); -fs.writeFileSync(filename, exptectedBuff); +fs.writeFileSync(filename, expectedBuff); const allocateEmptyBuffers = (combinedLength) => { const bufferArr = []; - // Allocate two buffers, each half the size of exptectedBuff + // Allocate two buffers, each half the size of expectedBuff bufferArr[0] = Buffer.alloc(Math.floor(combinedLength / 2)); bufferArr[1] = Buffer.alloc(combinedLength - bufferArr[0].length); @@ -28,7 +28,7 @@ const allocateEmptyBuffers = (combinedLength) => { { const fd = fs.openSync(filename, 'r'); - const bufferArr = allocateEmptyBuffers(exptectedBuff.length); + const bufferArr = allocateEmptyBuffers(expectedBuff.length); let read = fs.readvSync(fd, [Buffer.from('')], 0); assert.strictEqual(read, 0); @@ -45,7 +45,7 @@ const allocateEmptyBuffers = (combinedLength) => { { const fd = fs.openSync(filename, 'r'); - const bufferArr = allocateEmptyBuffers(exptectedBuff.length); + const bufferArr = allocateEmptyBuffers(expectedBuff.length); let read = fs.readvSync(fd, [Buffer.from('')]); assert.strictEqual(read, 0); diff --git a/test/parallel/test-fs-readv.js b/test/parallel/test-fs-readv.js index 111719a7efbb0f..0a11405d859470 100644 --- a/test/parallel/test-fs-readv.js +++ b/test/parallel/test-fs-readv.js @@ -11,11 +11,11 @@ const expected = 'ümlaut. 
Лорем 運務ホソモ指及 आपको कर let cnt = 0; const getFileName = () => tmpdir.resolve(`readv_${++cnt}.txt`); -const exptectedBuff = Buffer.from(expected); +const expectedBuff = Buffer.from(expected); const allocateEmptyBuffers = (combinedLength) => { const bufferArr = []; - // Allocate two buffers, each half the size of exptectedBuff + // Allocate two buffers, each half the size of expectedBuff bufferArr[0] = Buffer.alloc(Math.floor(combinedLength / 2)); bufferArr[1] = Buffer.alloc(combinedLength - bufferArr[0].length); @@ -25,11 +25,11 @@ const allocateEmptyBuffers = (combinedLength) => { const getCallback = (fd, bufferArr) => { return common.mustSucceed((bytesRead, buffers) => { assert.deepStrictEqual(bufferArr, buffers); - const expectedLength = exptectedBuff.length; + const expectedLength = expectedBuff.length; assert.deepStrictEqual(bytesRead, expectedLength); fs.closeSync(fd); - assert(Buffer.concat(bufferArr).equals(exptectedBuff)); + assert(Buffer.concat(bufferArr).equals(expectedBuff)); }); }; @@ -37,9 +37,9 @@ const getCallback = (fd, bufferArr) => { { const filename = getFileName(); const fd = fs.openSync(filename, 'w+'); - fs.writeSync(fd, exptectedBuff); + fs.writeSync(fd, expectedBuff); - const bufferArr = allocateEmptyBuffers(exptectedBuff.length); + const bufferArr = allocateEmptyBuffers(expectedBuff.length); const callback = getCallback(fd, bufferArr); fs.readv(fd, bufferArr, 0, callback); @@ -48,10 +48,10 @@ const getCallback = (fd, bufferArr) => { // fs.readv with array of buffers without position { const filename = getFileName(); - fs.writeFileSync(filename, exptectedBuff); + fs.writeFileSync(filename, expectedBuff); const fd = fs.openSync(filename, 'r'); - const bufferArr = allocateEmptyBuffers(exptectedBuff.length); + const bufferArr = allocateEmptyBuffers(expectedBuff.length); const callback = getCallback(fd, bufferArr); fs.readv(fd, bufferArr, callback); @@ -64,7 +64,7 @@ const wrongInputs = [false, 'test', {}, [{}], ['sdf'], null, undefined]; { const filename = getFileName(2); - fs.writeFileSync(filename, exptectedBuff); + fs.writeFileSync(filename, expectedBuff); const fd = fs.openSync(filename, 'r'); for (const wrongInput of wrongInputs) { diff --git a/test/parallel/test-fs-watch-recursive-update-file.js b/test/parallel/test-fs-watch-recursive-update-file.js index 7100b015ab2567..e27a4c37e4f6c3 100644 --- a/test/parallel/test-fs-watch-recursive-update-file.js +++ b/test/parallel/test-fs-watch-recursive-update-file.js @@ -31,7 +31,7 @@ fs.writeFileSync(testFile, 'hello'); const watcher = fs.watch(testDirectory, { recursive: true }); watcher.on('change', common.mustCallAtLeast(function(event, filename) { - // Libuv inconsistenly emits a rename event for the file we are watching + // Libuv inconsistently emits a rename event for the file we are watching assert.ok(event === 'change' || event === 'rename'); if (filename === path.basename(testFile)) { diff --git a/test/parallel/test-inspector-break-when-eval.js b/test/parallel/test-inspector-break-when-eval.js index d5170706a07153..6fc76acd840cad 100644 --- a/test/parallel/test-inspector-break-when-eval.js +++ b/test/parallel/test-inspector-break-when-eval.js @@ -66,7 +66,7 @@ async function stepOverConsoleStatement(session) { } async function runTests() { - // NOTE(mmarchini): Use --inspect-brk to improve avoid undeterministic + // NOTE(mmarchini): Use --inspect-brk to improve avoid indeterministic // behavior. 
const child = new NodeInstance(['--inspect-brk=0'], undefined, script); const session = await child.connectInspectorSession(); diff --git a/test/parallel/test-net-listen-twice.js b/test/parallel/test-net-listen-twice.js index f3e3f1475b54b3..5a2399ea217005 100644 --- a/test/parallel/test-net-listen-twice.js +++ b/test/parallel/test-net-listen-twice.js @@ -14,7 +14,7 @@ if (cluster.isPrimary) { server.listen(); try { // Currently, we can call `listen` twice in cluster worker, - // if we can not call `listen` twice in the futrue, + // if we can not call `listen` twice in the future, // just skip this test. server.listen(); } catch (e) { diff --git a/test/parallel/test-net-pipe-with-long-path.js b/test/parallel/test-net-pipe-with-long-path.js index 38e020f6698b0e..cebafd6028520e 100644 --- a/test/parallel/test-net-pipe-with-long-path.js +++ b/test/parallel/test-net-pipe-with-long-path.js @@ -20,7 +20,7 @@ const server = net.createServer() .listen(pipePath) // It may work on some operating systems .on('listening', () => { - // The socket file must exsit + // The socket file must exist assert.ok(fs.existsSync(pipePath)); const socket = net.connect(pipePath, common.mustCall(() => { socket.destroy(); diff --git a/test/parallel/test-process-exit-code-validation.js b/test/parallel/test-process-exit-code-validation.js index 9987b58c867c99..86163ddd5f4cda 100644 --- a/test/parallel/test-process-exit-code-validation.js +++ b/test/parallel/test-process-exit-code-validation.js @@ -110,11 +110,11 @@ if (process.argv[2] === undefined) { // Check process.exitCode for (const arg of invalids) { - debug(`invaild code: ${inspect(arg.code)}`); + debug(`invalid code: ${inspect(arg.code)}`); throws(() => (process.exitCode = arg.code), new RegExp(arg.pattern)); } for (const arg of valids) { - debug(`vaild code: ${inspect(arg.code)}`); + debug(`valid code: ${inspect(arg.code)}`); process.exitCode = arg.code; } diff --git a/test/parallel/test-runner-reporters.js b/test/parallel/test-runner-reporters.js index b557cef1b9bef8..bd2b86fc2313d7 100644 --- a/test/parallel/test-runner-reporters.js +++ b/test/parallel/test-runner-reporters.js @@ -13,7 +13,7 @@ tmpdir.refresh(); let tmpFiles = 0; describe('node:test reporters', { concurrency: true }, () => { - it('should default to outputing TAP to stdout', async () => { + it('should default to outputting TAP to stdout', async () => { const child = spawnSync(process.execPath, ['--test', testFile]); assert.strictEqual(child.stderr.toString(), ''); assert.match(child.stdout.toString(), /✖ failing tests:/); @@ -180,15 +180,15 @@ describe('node:test reporters', { concurrency: true }, () => { ['--test', '--test-reporter', 'spec', '--test-reporter-destination', file, testFile]); assert.strictEqual(child.stderr.toString(), ''); assert.strictEqual(child.stdout.toString(), ''); - const fileConent = fs.readFileSync(file, 'utf8'); - assert.match(fileConent, /▶ nested/); - assert.match(fileConent, /✔ ok/); - assert.match(fileConent, /✖ failing/); - assert.match(fileConent, /ℹ tests 4/); - assert.match(fileConent, /ℹ pass 2/); - assert.match(fileConent, /ℹ fail 2/); - assert.match(fileConent, /ℹ cancelled 0/); - assert.match(fileConent, /ℹ skipped 0/); - assert.match(fileConent, /ℹ todo 0/); + const fileContent = fs.readFileSync(file, 'utf8'); + assert.match(fileContent, /▶ nested/); + assert.match(fileContent, /✔ ok/); + assert.match(fileContent, /✖ failing/); + assert.match(fileContent, /ℹ tests 4/); + assert.match(fileContent, /ℹ pass 2/); + assert.match(fileContent, /ℹ fail 2/); + 
assert.match(fileContent, /ℹ cancelled 0/); + assert.match(fileContent, /ℹ skipped 0/); + assert.match(fileContent, /ℹ todo 0/); }); }); diff --git a/test/parallel/test-unhandled-exception-with-worker-inuse.js b/test/parallel/test-unhandled-exception-with-worker-inuse.js index a3e823ca70bf0f..47c220245d5513 100644 --- a/test/parallel/test-unhandled-exception-with-worker-inuse.js +++ b/test/parallel/test-unhandled-exception-with-worker-inuse.js @@ -10,7 +10,7 @@ const common = require('../common'); // // The root cause of this issue is that before PerIsolateMessageListener() // is invoked by v8, v8 preserves the JS vm state, although it should -// switch to EXTERNEL. https://bugs.chromium.org/p/v8/issues/detail?id=13464 +// switch to EXTERNAL. https://bugs.chromium.org/p/v8/issues/detail?id=13464 // // Therefore, this commit can be considered as an workaround of the v8 bug, // but we also find it not useful to call SetIdle() when terminating. diff --git a/test/parallel/test-url-relative.js b/test/parallel/test-url-relative.js index 9dfe954c1fe854..2751ec3b512584 100644 --- a/test/parallel/test-url-relative.js +++ b/test/parallel/test-url-relative.js @@ -346,7 +346,7 @@ const relativeTests2 = [ 'file:///C:/DEV/Haskell/lib/HXmlToolbox-3.01/examples/mini1.xml'], ['../b/c', 'foo:a/y/z', 'foo:a/b/c'], - // changeing auth + // changing auth ['http://diff:auth@www.example.com', 'http://asdf:qwer@www.example.com', 'http://diff:auth@www.example.com/'], diff --git a/test/parallel/test-v8-query-objects.js b/test/parallel/test-v8-query-objects.js index ea2b99f727f051..8e9616b2f5a3b0 100644 --- a/test/parallel/test-v8-query-objects.js +++ b/test/parallel/test-v8-query-objects.js @@ -50,7 +50,7 @@ common.expectWarning( class TestV8QueryObjectsChildClass extends TestV8QueryObjectsBaseClass {} const summary = v8.queryObjects(TestV8QueryObjectsBaseClass, { format: 'summary' }); // TestV8QueryObjectsChildClass's prototype's [[Prototype]] slot is - // TestV8QueryObjectsBaseClass's prototoype so it shows up in the query. + // TestV8QueryObjectsBaseClass's prototype so it shows up in the query. assert.deepStrictEqual(summary, [ format(TestV8QueryObjectsChildClass.prototype), ]); diff --git a/test/parallel/test-worker-message-port-transfer-duplicate.js b/test/parallel/test-worker-message-port-transfer-duplicate.js index c893556d8d2c48..ad0a2d8aca1f01 100644 --- a/test/parallel/test-worker-message-port-transfer-duplicate.js +++ b/test/parallel/test-worker-message-port-transfer-duplicate.js @@ -3,7 +3,7 @@ const common = require('../common'); const assert = require('assert'); const { MessageChannel } = require('worker_threads'); -// Test that passing duplicate transferrables in the transfer list throws +// Test that passing duplicate transferables in the transfer list throws // DataCloneError exceptions. { diff --git a/test/parallel/test-worker-message-port-wasm-threads.js b/test/parallel/test-worker-message-port-wasm-threads.js index 4174a8951e9340..fe70261fd7b2eb 100644 --- a/test/parallel/test-worker-message-port-wasm-threads.js +++ b/test/parallel/test-worker-message-port-wasm-threads.js @@ -3,7 +3,7 @@ const common = require('../common'); const assert = require('assert'); const { MessageChannel, Worker } = require('worker_threads'); -// Test that SharedArrayBuffer instances created from WASM are transferrable +// Test that SharedArrayBuffer instances created from WASM are transferable // through MessageChannels (without crashing). 
const fixtures = require('../common/fixtures'); diff --git a/test/sequential/test-error-serdes.js b/test/sequential/test-error-serdes.js index bced9cf40b7326..ed6b7b476c3912 100644 --- a/test/sequential/test-error-serdes.js +++ b/test/sequential/test-error-serdes.js @@ -52,7 +52,7 @@ class ErrorWithCause extends Error { return new Error('err'); } } -class ErrorWithThowingCause extends Error { +class ErrorWithThrowingCause extends Error { get cause() { throw new Error('err'); } @@ -81,9 +81,9 @@ assert.deepStrictEqual(cycle(errorWithCause).cause, { foo: 'bar' }); assert.strictEqual(Object.hasOwn(cycle(errorWithThrowingCause), 'cause'), false); assert.strictEqual(Object.hasOwn(cycle(errorWithCyclicCause), 'cause'), true); assert.deepStrictEqual(cycle(new ErrorWithCause('Error with cause')).cause, new Error('err')); -assert.strictEqual(cycle(new ErrorWithThowingCause('Error with cause')).cause, undefined); -assert.strictEqual(Object.hasOwn(cycle(new ErrorWithThowingCause('Error with cause')), 'cause'), false); -// When the cause is cyclic, it is serialized until Maxiumum call stack size is reached +assert.strictEqual(cycle(new ErrorWithThrowingCause('Error with cause')).cause, undefined); +assert.strictEqual(Object.hasOwn(cycle(new ErrorWithThrowingCause('Error with cause')), 'cause'), false); +// When the cause is cyclic, it is serialized until Maximum call stack size is reached let depth = 0; let e = cycle(new ErrorWithCyclicCause('Error with cause')); while (e.cause) { diff --git a/test/sequential/test-performance-eventloopdelay.js b/test/sequential/test-performance-eventloopdelay.js index 24e5744118cc38..0bc1758113e480 100644 --- a/test/sequential/test-performance-eventloopdelay.js +++ b/test/sequential/test-performance-eventloopdelay.js @@ -101,5 +101,5 @@ const { sleep } = require('internal/util'); } // Make sure that the histogram instances can be garbage-collected without -// and not just implictly destroyed when the Environment is torn down. +// and not just implicitly destroyed when the Environment is torn down. process.on('exit', global.gc); diff --git a/test/sequential/test-worker-eventlooputil.js b/test/sequential/test-worker-eventlooputil.js index 55a3995c3d11c1..396902915e1c17 100644 --- a/test/sequential/test-worker-eventlooputil.js +++ b/test/sequential/test-worker-eventlooputil.js @@ -62,7 +62,7 @@ let workerELU; metricsCh.port2.once('message', mustCall(checkWorkerIdle)); metricsCh.port2.postMessage({ cmd: 'elu' }); // Make sure it's still safe to call eventLoopUtilization() after the worker - // hass been closed. + // has been closed. worker.on('exit', mustCall(() => { assert.deepStrictEqual(worker.performance.eventLoopUtilization(), { idle: 0, active: 0, utilization: 0 }); diff --git a/test/sequential/test-worker-prof.js b/test/sequential/test-worker-prof.js index b98b0be953b150..8c3d91e6c4c310 100644 --- a/test/sequential/test-worker-prof.js +++ b/test/sequential/test-worker-prof.js @@ -76,7 +76,7 @@ if (process.argv[2] === 'child') { console.log('parent ticks', parentTicks.length); // When not tracking Worker threads, only 1 or 2 ticks would // have been recorded. - // prof_sampling_interval is by default 1 millsecond. A higher NODE_TEST_SPIN_MS + // prof_sampling_interval is by default 1 millisecond. A higher NODE_TEST_SPIN_MS // should result in more ticks, while 15 should be safe on most machines. assert(workerTicks.length > 15, `worker ticks <= 15:\n${workerTicks.join('\n')}`); assert(parentTicks.length > 15, `parent ticks <= 15:\n${parentTicks.join('\n')}`);