From 9132d167b51d8bc1ca9111d5c58e2386c3ae593a Mon Sep 17 00:00:00 2001 From: Hafez Date: Sun, 11 Jan 2026 18:57:52 +0100 Subject: [PATCH 1/6] test: add test for assert OOM on large object diff Add a regression test for an issue where assert.strictEqual causes OOM when comparing objects with many converging paths to shared objects. The test creates an object graph similar to Mongoose documents and verifies that the assertion fails with an AssertionError rather than crashing. --- .../test-assert-large-object-diff-oom.js | 184 ++++++++++++++++++ 1 file changed, 184 insertions(+) create mode 100644 test/parallel/test-assert-large-object-diff-oom.js diff --git a/test/parallel/test-assert-large-object-diff-oom.js b/test/parallel/test-assert-large-object-diff-oom.js new file mode 100644 index 00000000000000..e58e7fdaf04934 --- /dev/null +++ b/test/parallel/test-assert-large-object-diff-oom.js @@ -0,0 +1,184 @@ +// Flags: --max-old-space-size=512 +'use strict'; + +// Test that assert.strictEqual does not OOM when comparing objects +// that produce large util.inspect output. +// +// This is a regression test for an issue where objects with many unique +// paths converging on shared objects can cause exponential growth in +// util.inspect output, leading to OOM during assertion error generation. +// +// The fix adds a 2MB limit to inspect output in assertion_error.js + +require('../common'); +const assert = require('assert'); + +// Create an object graph where many unique paths converge on shared objects. +// This delays circular reference detection and creates exponential growth +// in util.inspect output at high depths. + +function createBase() { + const base = { + id: 'base', + models: {}, + schemas: {}, + types: {}, + }; + + for (let i = 0; i < 5; i++) { + base.types[`type_${i}`] = { + name: `type_${i}`, + base, + caster: { base, name: `type_${i}_caster` }, + options: { + base, + validators: [ + { base, name: 'v1' }, + { base, name: 'v2' }, + { base, name: 'v3' }, + ], + }, + }; + } + + return base; +} + +function createSchema(base, name) { + const schema = { + name, + base, + paths: {}, + tree: {}, + virtuals: {}, + }; + + for (let i = 0; i < 10; i++) { + schema.paths[`field_${i}`] = { + path: `field_${i}`, + schema, + instance: base.types[`type_${i % 5}`], + options: { + type: base.types[`type_${i % 5}`], + validators: [ + { validator: () => true, base, schema }, + { validator: () => true, base, schema }, + ], + }, + caster: base.types[`type_${i % 5}`].caster, + }; + } + + schema.childSchemas = []; + for (let i = 0; i < 3; i++) { + const child = { name: `${name}_child_${i}`, base, schema, paths: {} }; + for (let j = 0; j < 5; j++) { + child.paths[`child_field_${j}`] = { + path: `child_field_${j}`, + schema: child, + instance: base.types[`type_${j % 5}`], + options: { base, schema: child }, + }; + } + schema.childSchemas.push(child); + } + + return schema; +} + +function createDocument(schema, base) { + const doc = { + $__: { activePaths: {}, pathsToScopes: {}, populated: {} }, + _doc: { name: 'test' }, + _schema: schema, + _base: base, + }; + + for (let i = 0; i < 10; i++) { + doc.$__.pathsToScopes[`path_${i}`] = { + schema, + base, + type: base.types[`type_${i % 5}`], + }; + } + + for (let i = 0; i < 3; i++) { + const populatedSchema = createSchema(base, `Populated_${i}`); + base.schemas[`Populated_${i}`] = populatedSchema; + + doc.$__.populated[`ref_${i}`] = { + value: { + $__: { pathsToScopes: {}, populated: {} }, + _doc: { id: i }, + _schema: populatedSchema, + _base: base, + }, + options: 
{ path: `ref_${i}`, model: `Model_${i}`, base }, + schema: populatedSchema, + }; + + for (let j = 0; j < 5; j++) { + doc.$__.populated[`ref_${i}`].value.$__.pathsToScopes[`field_${j}`] = { + schema: populatedSchema, + base, + type: base.types[`type_${j % 5}`], + }; + } + } + + return doc; +} + +class Document { + constructor(schema, base) { + Object.assign(this, createDocument(schema, base)); + } +} + +Object.defineProperty(Document.prototype, 'schema', { + get() { return this._schema; }, + enumerable: true, +}); + +Object.defineProperty(Document.prototype, 'base', { + get() { return this._base; }, + enumerable: true, +}); + +// Setup test objects +const base = createBase(); +const schema1 = createSchema(base, 'Schema1'); +const schema2 = createSchema(base, 'Schema2'); +base.schemas.Schema1 = schema1; +base.schemas.Schema2 = schema2; + +const doc1 = new Document(schema1, base); +const doc2 = new Document(schema2, base); +doc2.$__.populated.ref_0.value.$__parent = doc1; + +// The actual OOM test: assert.strictEqual should NOT crash +// when comparing objects with large inspect output. +// It should throw an AssertionError with a reasonable message size. +{ + const actual = doc2.$__.populated.ref_0.value.$__parent; + const expected = doc2; + + // This assertion is expected to fail (they are different objects) + // but it should NOT cause an OOM crash + assert.throws( + () => assert.strictEqual(actual, expected, 'Objects should be equal'), + (err) => { + // Should get an AssertionError, not an OOM crash + assert.ok(err instanceof assert.AssertionError, + 'Expected AssertionError'); + + // Message should exist and be reasonable (not hundreds of MB) + // The fix limits inspect output to 2MB, so message should be bounded + const maxExpectedSize = 5 * 1024 * 1024; // 5MB (2MB * 2 + overhead) + assert.ok(err.message.length < maxExpectedSize, + `Error message too large: ${(err.message.length / 1024 / 1024).toFixed(2)} MB`); + + return true; + } + ); +} From 5bb2cdcf14339699e4eafb5407bd50cd588dae9b Mon Sep 17 00:00:00 2001 From: Hafez Date: Sun, 11 Jan 2026 19:03:41 +0100 Subject: [PATCH 2/6] assert: prevent OOM when generating diff for large objects Objects with many converging paths to shared objects can cause exponential growth in util.inspect output. When assert.strictEqual fails on such objects, the error message generation would OOM while trying to create a diff of the 100+ MB inspect strings. Add a 2MB limit to inspectValue() output. When truncation occurs, a marker is added and the error message indicates lines were skipped. The comparison itself is unaffected; only the error output is truncated. --- lib/internal/assert/assertion_error.js | 25 +++++++++++++++++++++++-- 1 file changed, 23 insertions(+), 2 deletions(-) diff --git a/lib/internal/assert/assertion_error.js b/lib/internal/assert/assertion_error.js index 5dbf1e7a341380..23a5336799bdcd 100644 --- a/lib/internal/assert/assertion_error.js +++ b/lib/internal/assert/assertion_error.js @@ -12,6 +12,7 @@ const { ObjectPrototypeHasOwnProperty, SafeSet, String, + StringPrototypeEndsWith, StringPrototypeRepeat, StringPrototypeSlice, StringPrototypeSplit, @@ -42,6 +43,10 @@ const kReadableOperator = { const kMaxShortStringLength = 12; const kMaxLongStringLength = 512; +// Maximum size for inspect output before truncation to prevent OOM. +// Objects with many converging paths can produce exponential growth in +// util.inspect output at high depths, leading to OOM during diff generation. 
+const kMaxInspectOutputLength = 2 * 1024 * 1024; // 2MB const kMethodsWithCustomMessageDiff = new SafeSet() .add('deepStrictEqual') @@ -69,10 +74,10 @@ function copyError(source) { return target; } -function inspectValue(val) { +function inspectValue(val, maxLength = kMaxInspectOutputLength) { // The util.inspect default values could be changed. This makes sure the // error messages contain the necessary information nevertheless. - return inspect(val, { + const result = inspect(val, { compact: false, customInspect: false, depth: 1000, @@ -85,6 +90,15 @@ function inspectValue(val) { // Inspect getters as we also check them when comparing entries. getters: true, }); + + // Truncate if the output is too large to prevent OOM during diff generation. + // Objects with deeply nested structures can produce exponentially large + // inspect output that causes memory exhaustion when passed to the diff + // algorithm. + if (result.length > maxLength) { + return StringPrototypeSlice(result, 0, maxLength) + '\n... [truncated]'; + } + return result; } function getErrorMessage(operator, message) { @@ -189,6 +203,13 @@ function createErrDiff(actual, expected, operator, customMessage, diffType = 'si let message = ''; const inspectedActual = inspectValue(actual); const inspectedExpected = inspectValue(expected); + + // Check if either value was truncated due to size limits + const truncationMarker = '\n... [truncated]'; + if (StringPrototypeEndsWith(inspectedActual, truncationMarker) || + StringPrototypeEndsWith(inspectedExpected, truncationMarker)) { + skipped = true; + } const inspectedSplitActual = StringPrototypeSplit(inspectedActual, '\n'); const inspectedSplitExpected = StringPrototypeSplit(inspectedExpected, '\n'); const showSimpleDiff = isSimpleDiff(actual, inspectedSplitActual, expected, inspectedSplitExpected); From 89051786869578a335346799958428ce30adf5b7 Mon Sep 17 00:00:00 2001 From: Hafez Date: Sun, 11 Jan 2026 19:20:33 +0100 Subject: [PATCH 3/6] test: add test for assert OOM on large object diff --- .../test-assert-large-object-diff-oom.js | 219 +++++------------- 1 file changed, 63 insertions(+), 156 deletions(-) diff --git a/test/parallel/test-assert-large-object-diff-oom.js b/test/parallel/test-assert-large-object-diff-oom.js index e58e7fdaf04934..5284d199df6096 100644 --- a/test/parallel/test-assert-large-object-diff-oom.js +++ b/test/parallel/test-assert-large-object-diff-oom.js @@ -1,184 +1,91 @@ // Flags: --max-old-space-size=512 'use strict'; -// Test that assert.strictEqual does not OOM when comparing objects -// that produce large util.inspect output. -// -// This is a regression test for an issue where objects with many unique -// paths converging on shared objects can cause exponential growth in -// util.inspect output, leading to OOM during assertion error generation. -// -// The fix adds a 2MB limit to inspect output in assertion_error.js +// Regression test: assert.strictEqual should not OOM when comparing objects +// with many converging paths to shared objects. Such objects cause exponential +// growth in util.inspect output, which previously led to OOM during error +// message generation. require('../common'); const assert = require('assert'); -// Create an object graph where many unique paths converge on shared objects. -// This delays circular reference detection and creates exponential growth -// in util.inspect output at high depths. 
+// Test: should throw AssertionError, not OOM +{ + const { doc1, doc2 } = createTestObjects(); -function createBase() { - const base = { - id: 'base', - models: {}, - schemas: {}, - types: {}, - }; + assert.throws( + () => assert.strictEqual(doc1, doc2), + (err) => { + assert.ok(err instanceof assert.AssertionError); + // Message should be bounded (fix truncates inspect output at 2MB) + assert.ok(err.message.length < 5 * 1024 * 1024); + return true; + } + ); +} - for (let i = 0; i < 5; i++) { - base.types[`type_${i}`] = { - name: `type_${i}`, - base, - caster: { base, name: `type_${i}_caster` }, - options: { - base, - validators: [ - { base, name: 'v1' }, - { base, name: 'v2' }, - { base, name: 'v3' }, - ], - }, - }; - } +// Creates objects where many paths converge on shared objects, causing +// exponential growth in util.inspect output at high depths. +function createTestObjects() { + const base = createBase(); - return base; -} + const s1 = createSchema(base, 's1'); + const s2 = createSchema(base, 's2'); + base.schemas.s1 = s1; + base.schemas.s2 = s2; -function createSchema(base, name) { - const schema = { - name, - base, - paths: {}, - tree: {}, - virtuals: {}, - }; + const doc1 = createDoc(s1, base); + const doc2 = createDoc(s2, base); - for (let i = 0; i < 10; i++) { - schema.paths[`field_${i}`] = { - path: `field_${i}`, - schema, - instance: base.types[`type_${i % 5}`], - options: { - type: base.types[`type_${i % 5}`], - validators: [ - { validator: () => true, base, schema }, - { validator: () => true, base, schema }, - ], - }, - caster: base.types[`type_${i % 5}`].caster, - }; + // Populated refs create additional converging paths + for (let i = 0; i < 2; i++) { + const ps = createSchema(base, 'p' + i); + base.schemas['p' + i] = ps; + doc1.$__.pop['r' + i] = { value: createDoc(ps, base), opts: { base, schema: ps } }; } - schema.childSchemas = []; - for (let i = 0; i < 3; i++) { - const child = { name: `${name}_child_${i}`, base, schema, paths: {} }; - for (let j = 0; j < 5; j++) { - child.paths[`child_field_${j}`] = { - path: `child_field_${j}`, - schema: child, - instance: base.types[`type_${j % 5}`], - options: { base, schema: child }, - }; - } - schema.childSchemas.push(child); - } + // Cross-link creates more converging paths + doc1.$__.pop.r0.value.$__parent = doc2; - return schema; + return { doc1, doc2 }; } -function createDocument(schema, base) { - const doc = { - $__: { activePaths: {}, pathsToScopes: {}, populated: {} }, - _doc: { name: 'test' }, - _schema: schema, - _base: base, - }; - - for (let i = 0; i < 10; i++) { - doc.$__.pathsToScopes[`path_${i}`] = { - schema, +function createBase() { + const base = { types: {}, schemas: {} }; + for (let i = 0; i < 4; i++) { + base.types['t' + i] = { base, - type: base.types[`type_${i % 5}`], + caster: { base }, + opts: { base, validators: [{ base }, { base }] } }; } + return base; +} - for (let i = 0; i < 3; i++) { - const populatedSchema = createSchema(base, `Populated_${i}`); - base.schemas[`Populated_${i}`] = populatedSchema; - - doc.$__.populated[`ref_${i}`] = { - value: { - $__: { pathsToScopes: {}, populated: {} }, - _doc: { id: i }, - _schema: populatedSchema, - _base: base, - }, - options: { path: `ref_${i}`, model: `Model_${i}`, base }, - schema: populatedSchema, +function createSchema(base, name) { + const schema = { name, base, paths: {}, children: [] }; + for (let i = 0; i < 6; i++) { + schema.paths['f' + i] = { + schema, base, + type: base.types['t' + (i % 4)], + caster: base.types['t' + (i % 4)].caster, + opts: { 
schema, base, validators: [{ schema, base }] } }; - - for (let j = 0; j < 5; j++) { - doc.$__.populated[`ref_${i}`].value.$__.pathsToScopes[`field_${j}`] = { - schema: populatedSchema, - base, - type: base.types[`type_${j % 5}`], - }; + } + for (let i = 0; i < 2; i++) { + const child = { name: name + '_c' + i, base, parent: schema, paths: {} }; + for (let j = 0; j < 3; j++) { + child.paths['cf' + j] = { schema: child, base, type: base.types['t' + (j % 4)] }; } + schema.children.push(child); } - - return doc; + return schema; } -class Document { - constructor(schema, base) { - Object.assign(this, createDocument(schema, base)); +function createDoc(schema, base) { + const doc = { schema, base, $__: { scopes: {}, pop: {} } }; + for (let i = 0; i < 6; i++) { + doc.$__.scopes['p' + i] = { schema, base, type: base.types['t' + (i % 4)] }; } -} - -Object.defineProperty(Document.prototype, 'schema', { - get() { return this._schema; }, - enumerable: true, -}); - -Object.defineProperty(Document.prototype, 'base', { - get() { return this._base; }, - enumerable: true, -}); - -// Setup test objects -const base = createBase(); -const schema1 = createSchema(base, 'Schema1'); -const schema2 = createSchema(base, 'Schema2'); -base.schemas.Schema1 = schema1; -base.schemas.Schema2 = schema2; - -const doc1 = new Document(schema1, base); -const doc2 = new Document(schema2, base); -doc2.$__.populated.ref_0.value.$__parent = doc1; - -// The actual OOM test: assert.strictEqual should NOT crash -// when comparing objects with large inspect output. -// It should throw an AssertionError with a reasonable message size. -{ - const actual = doc2.$__.populated.ref_0.value.$__parent; - const expected = doc2; - - // This assertion is expected to fail (they are different objects) - // but it should NOT cause an OOM crash - assert.throws( - () => assert.strictEqual(actual, expected, 'Objects should be equal'), - (err) => { - // Should get an AssertionError, not an OOM crash - assert.ok(err instanceof assert.AssertionError, - 'Expected AssertionError'); - - // Message should exist and be reasonable (not hundreds of MB) - // The fix limits inspect output to 2MB, so message should be bounded - const maxExpectedSize = 5 * 1024 * 1024; // 5MB (2MB * 2 + overhead) - assert.ok(err.message.length < maxExpectedSize, - `Error message too large: ${(err.message.length / 1024 / 1024).toFixed(2)} MB`); - - return true; - } - ); + return doc; } From 94f25758ba06bfe90facfc6f6ab4a19841347fb1 Mon Sep 17 00:00:00 2001 From: Hafez Date: Sun, 11 Jan 2026 23:31:48 +0100 Subject: [PATCH 4/6] assert: address review feedback --- lib/internal/assert/assertion_error.js | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/lib/internal/assert/assertion_error.js b/lib/internal/assert/assertion_error.js index 23a5336799bdcd..095bd3ba3fbb40 100644 --- a/lib/internal/assert/assertion_error.js +++ b/lib/internal/assert/assertion_error.js @@ -47,6 +47,7 @@ const kMaxLongStringLength = 512; // Objects with many converging paths can produce exponential growth in // util.inspect output at high depths, leading to OOM during diff generation. const kMaxInspectOutputLength = 2 * 1024 * 1024; // 2MB +const kTruncatedByteMarker = '\n... [truncated]'; const kMethodsWithCustomMessageDiff = new SafeSet() .add('deepStrictEqual') @@ -74,7 +75,7 @@ function copyError(source) { return target; } -function inspectValue(val, maxLength = kMaxInspectOutputLength) { +function inspectValue(val) { // The util.inspect default values could be changed. 
This makes sure the // error messages contain the necessary information nevertheless. const result = inspect(val, { @@ -95,8 +96,9 @@ function inspectValue(val, maxLength = kMaxInspectOutputLength) { // Objects with deeply nested structures can produce exponentially large // inspect output that causes memory exhaustion when passed to the diff // algorithm. - if (result.length > maxLength) { - return StringPrototypeSlice(result, 0, maxLength) + '\n... [truncated]'; + if (result.length > kMaxInspectOutputLength) { + return StringPrototypeSlice(result, 0, kMaxInspectOutputLength) + + kTruncatedByteMarker; } return result; } @@ -205,9 +207,8 @@ function createErrDiff(actual, expected, operator, customMessage, diffType = 'si const inspectedExpected = inspectValue(expected); // Check if either value was truncated due to size limits - const truncationMarker = '\n... [truncated]'; - if (StringPrototypeEndsWith(inspectedActual, truncationMarker) || - StringPrototypeEndsWith(inspectedExpected, truncationMarker)) { + if (StringPrototypeEndsWith(inspectedActual, kTruncatedByteMarker) || + StringPrototypeEndsWith(inspectedExpected, kTruncatedByteMarker)) { skipped = true; } const inspectedSplitActual = StringPrototypeSplit(inspectedActual, '\n'); From 68712db9198de83c0f379183b1ab4f97cafefe18 Mon Sep 17 00:00:00 2001 From: Hafez Date: Sun, 11 Jan 2026 23:32:26 +0100 Subject: [PATCH 5/6] assert: address PR feedback Co-authored-by: Aviv Keller --- lib/internal/assert/assertion_error.js | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/internal/assert/assertion_error.js b/lib/internal/assert/assertion_error.js index 095bd3ba3fbb40..a7a680b37fd6f0 100644 --- a/lib/internal/assert/assertion_error.js +++ b/lib/internal/assert/assertion_error.js @@ -100,6 +100,7 @@ function inspectValue(val) { return StringPrototypeSlice(result, 0, kMaxInspectOutputLength) + kTruncatedByteMarker; } + return result; } From 301f1b5d087f98f84d52673fa747892d028d00dc Mon Sep 17 00:00:00 2001 From: Hafez Date: Sat, 7 Feb 2026 00:14:38 +0100 Subject: [PATCH 6/6] assert: add chunked diff and dynamic inspect truncation --- lib/internal/assert/assertion_error.js | 14 +- lib/internal/assert/myers_diff.js | 184 +++++++++++++++++- .../test-assert-large-object-diff-oom.js | 10 +- test/parallel/test-assert-myers-diff.js | 182 +++++++++++++++++ 4 files changed, 376 insertions(+), 14 deletions(-) diff --git a/lib/internal/assert/assertion_error.js b/lib/internal/assert/assertion_error.js index a7a680b37fd6f0..aa29aceec35ef1 100644 --- a/lib/internal/assert/assertion_error.js +++ b/lib/internal/assert/assertion_error.js @@ -19,6 +19,7 @@ const { } = primordials; const { isError } = require('internal/util'); +const { totalmem } = require('os'); const { inspect } = require('internal/util/inspect'); const colors = require('internal/util/colors'); @@ -43,10 +44,15 @@ const kReadableOperator = { const kMaxShortStringLength = 12; const kMaxLongStringLength = 512; -// Maximum size for inspect output before truncation to prevent OOM. -// Objects with many converging paths can produce exponential growth in -// util.inspect output at high depths, leading to OOM during diff generation. -const kMaxInspectOutputLength = 2 * 1024 * 1024; // 2MB +// Truncation limit for inspect output to prevent OOM during diff generation. +// Scaled to system memory: 512KB under 1GB, 1MB under 2GB, 2MB otherwise. +const kGB = 1024 ** 3; +const kMB = 1024 ** 2; +const totalMem = totalmem(); +const kMaxInspectOutputLength = + totalMem < kGB ? 
kMB / 2 : + totalMem < 2 * kGB ? kMB : + 2 * kMB; const kTruncatedByteMarker = '\n... [truncated]'; const kMethodsWithCustomMessageDiff = new SafeSet() diff --git a/lib/internal/assert/myers_diff.js b/lib/internal/assert/myers_diff.js index ee6359042e31b8..9117a7d3e37c76 100644 --- a/lib/internal/assert/myers_diff.js +++ b/lib/internal/assert/myers_diff.js @@ -2,7 +2,13 @@ const { ArrayPrototypePush, + ArrayPrototypeSlice, Int32Array, + MathFloor, + MathMax, + MathMin, + MathRound, + RegExpPrototypeExec, StringPrototypeEndsWith, } = primordials; @@ -14,7 +20,11 @@ const { const colors = require('internal/util/colors'); +const kChunkSize = 512; const kNopLinesToCollapse = 5; +// Lines that are just structural characters make poor alignment anchors +// because they appear many times and don't uniquely identify a position. +const kTrivialLinePattern = /^\s*[{}[\],]+\s*$/; const kOperations = { DELETE: -1, NOP: 0, @@ -31,19 +41,11 @@ function areLinesEqual(actual, expected, checkCommaDisparity) { return false; } -function myersDiff(actual, expected, checkCommaDisparity = false) { +function myersDiffInternal(actual, expected, checkCommaDisparity) { const actualLength = actual.length; const expectedLength = expected.length; const max = actualLength + expectedLength; - if (max > 2 ** 31 - 1) { - throw new ERR_OUT_OF_RANGE( - 'myersDiff input size', - '< 2^31', - max, - ); - } - const v = new Int32Array(2 * max + 1); const trace = []; @@ -124,6 +126,170 @@ function backtrack(trace, actual, expected, checkCommaDisparity) { return result; } +function myersDiff(actual, expected, checkCommaDisparity = false) { + const actualLength = actual.length; + const expectedLength = expected.length; + const max = actualLength + expectedLength; + + if (max > 2 ** 31 - 1) { + throw new ERR_OUT_OF_RANGE( + 'myersDiff input size', + '< 2^31', + max, + ); + } + + // For small inputs, run the algorithm directly + if (actualLength <= kChunkSize && expectedLength <= kChunkSize) { + return myersDiffInternal(actual, expected, checkCommaDisparity); + } + + const boundaries = findAlignedBoundaries( + actual, expected, checkCommaDisparity, + ); + + // Process chunks and concatenate results (last chunk first for reversed order) + const result = []; + for (let i = boundaries.length - 2; i >= 0; i--) { + const actualStart = boundaries[i].actualIdx; + const actualEnd = boundaries[i + 1].actualIdx; + const expectedStart = boundaries[i].expectedIdx; + const expectedEnd = boundaries[i + 1].expectedIdx; + + const actualChunk = ArrayPrototypeSlice(actual, actualStart, actualEnd); + const expectedChunk = ArrayPrototypeSlice(expected, expectedStart, expectedEnd); + + if (actualChunk.length === 0 && expectedChunk.length === 0) continue; + + if (actualChunk.length === 0) { + for (let j = expectedChunk.length - 1; j >= 0; j--) { + ArrayPrototypePush(result, [kOperations.DELETE, expectedChunk[j]]); + } + continue; + } + + if (expectedChunk.length === 0) { + for (let j = actualChunk.length - 1; j >= 0; j--) { + ArrayPrototypePush(result, [kOperations.INSERT, actualChunk[j]]); + } + continue; + } + + const chunkDiff = myersDiffInternal(actualChunk, expectedChunk, checkCommaDisparity); + for (let j = 0; j < chunkDiff.length; j++) { + ArrayPrototypePush(result, chunkDiff[j]); + } + } + + return result; +} + +function findAlignedBoundaries(actual, expected, checkCommaDisparity) { + const actualLen = actual.length; + const expectedLen = expected.length; + const boundaries = [{ actualIdx: 0, expectedIdx: 0 }]; + const searchRadius = kChunkSize / 
2; + + const numTargets = MathMax( + MathFloor((actualLen - 1) / kChunkSize), + 1, + ); + + for (let i = 1; i <= numTargets; i++) { + const targetActual = MathMin(i * kChunkSize, actualLen); + if (targetActual >= actualLen) { + break; + } + + const targetExpected = MathMin( + MathRound(targetActual * expectedLen / actualLen), + expectedLen - 1, + ); + const prevBoundary = boundaries[boundaries.length - 1]; + + const anchor = findAnchorNear( + actual, expected, targetActual, targetExpected, + prevBoundary, searchRadius, checkCommaDisparity, + ); + + if (anchor !== undefined) { + ArrayPrototypePush(boundaries, anchor); + } else { + // Fallback: use proportional position, ensuring strictly increasing + const fallbackActual = MathMax(targetActual, prevBoundary.actualIdx + 1); + const fallbackExpected = MathMax(targetExpected, prevBoundary.expectedIdx + 1); + if (fallbackActual < actualLen && fallbackExpected < expectedLen) { + ArrayPrototypePush(boundaries, { actualIdx: fallbackActual, expectedIdx: fallbackExpected }); + } + } + } + + ArrayPrototypePush(boundaries, { actualIdx: actualLen, expectedIdx: expectedLen }); + return boundaries; +} + +// Search outward from targetActual and targetExpected for a non-trivial +// line that matches in both arrays, with adjacent context verification. +function findAnchorNear(actual, expected, targetActual, targetExpected, + prevBoundary, searchRadius, checkCommaDisparity) { + const actualLen = actual.length; + const expectedLen = expected.length; + + for (let offset = 0; offset <= searchRadius; offset++) { + const candidates = offset === 0 ? [targetActual] : [targetActual + offset, targetActual - offset]; + + for (let i = 0; i < candidates.length; i++) { + const actualIdx = candidates[i]; + if (actualIdx <= prevBoundary.actualIdx || actualIdx >= actualLen) { + continue; + } + + const line = actual[actualIdx]; + if (isTrivialLine(line)) { + continue; + } + + const searchStart = MathMax(prevBoundary.expectedIdx + 1, targetExpected - searchRadius); + const searchEnd = MathMin(expectedLen - 1, targetExpected + searchRadius); + + for (let j = 0; j <= searchRadius; j++) { + const offsets = j === 0 ? 
[0] : [j, -j]; + for (let k = 0; k < offsets.length; k++) { + const expectedIdx = targetExpected + offsets[k]; + if (expectedIdx < searchStart || expectedIdx > searchEnd || expectedIdx <= prevBoundary.expectedIdx) { + continue; + } + + if ( + areLinesEqual(line, expected[expectedIdx], checkCommaDisparity) && + hasAdjacentMatch(actual, expected, actualIdx, expectedIdx, checkCommaDisparity) + ) { + return { actualIdx, expectedIdx }; + } + } + } + } + } + + return undefined; +} + +function hasAdjacentMatch(actual, expected, actualIdx, expectedIdx, checkCommaDisparity) { + if (actualIdx > 0 && expectedIdx > 0 && + areLinesEqual(actual[actualIdx - 1], expected[expectedIdx - 1], checkCommaDisparity)) { + return true; + } + if (actualIdx < actual.length - 1 && expectedIdx < expected.length - 1 && + areLinesEqual(actual[actualIdx + 1], expected[expectedIdx + 1], checkCommaDisparity)) { + return true; + } + return false; +} + +function isTrivialLine(line) { + return RegExpPrototypeExec(kTrivialLinePattern, line) !== null; +} + function printSimpleMyersDiff(diff) { let message = ''; diff --git a/test/parallel/test-assert-large-object-diff-oom.js b/test/parallel/test-assert-large-object-diff-oom.js index 5284d199df6096..3095513303791c 100644 --- a/test/parallel/test-assert-large-object-diff-oom.js +++ b/test/parallel/test-assert-large-object-diff-oom.js @@ -6,9 +6,17 @@ // growth in util.inspect output, which previously led to OOM during error // message generation. -require('../common'); +const common = require('../common'); +const os = require('os'); const assert = require('assert'); +// This test creates objects with exponential inspect output that requires +// significant memory. Skip on systems with less than 1GB total memory. +const totalMemMB = os.totalmem() / 1024 / 1024; +if (totalMemMB < 1024) { + common.skip(`insufficient system memory (${Math.round(totalMemMB)}MB, need 1024MB)`); +} + // Test: should throw AssertionError, not OOM { const { doc1, doc2 } = createTestObjects(); diff --git a/test/parallel/test-assert-myers-diff.js b/test/parallel/test-assert-myers-diff.js index 31db3cd704ae06..25d8a72c8c634a 100644 --- a/test/parallel/test-assert-myers-diff.js +++ b/test/parallel/test-assert-myers-diff.js @@ -6,6 +6,7 @@ const assert = require('assert'); const { myersDiff } = require('internal/assert/myers_diff'); +// Test: myersDiff input size limit { const arr1 = { length: 2 ** 31 - 1 }; const arr2 = { length: 2 }; @@ -23,3 +24,184 @@ const { myersDiff } = require('internal/assert/myers_diff'); }) ); } + +// Test: small input correctness +{ + const actual = ['a', 'b', 'X', 'c', 'd']; + const expected = ['a', 'b', 'c', 'd']; + const ops = diffToForwardOps(myersDiff(actual, expected)); + + const inserts = ops.filter((o) => o.op === 1); + const deletes = ops.filter((o) => o.op === -1); + const nops = ops.filter((o) => o.op === 0); + + assert.strictEqual(inserts.length, 1); + assert.strictEqual(inserts[0].value, 'X'); + assert.strictEqual(deletes.length, 0); + assert.strictEqual(nops.length, 4); +} + +// Test: aligned boundary correctness - extra lines in the middle +// When expected has extra lines, aligned boundaries should produce +// only real INSERT/DELETE/NOP operations with no phantom diffs. 
+{ + const { actual, expected } = createAlignedTestArrays({ lineCount: 600, extraLineAt: 100 }); + + const result = myersDiff(actual, expected); + const ops = diffToForwardOps(result); + + const inserts = ops.filter((o) => o.op === 1); + const deletes = ops.filter((o) => o.op === -1); + const nops = ops.filter((o) => o.op === 0); + + assert.strictEqual(inserts.length, 1); + assert.strictEqual(inserts[0].value, 'EXTRA_100'); + assert.strictEqual(deletes.length, 0, 'should produce no phantom DELETEs'); + assert.strictEqual(nops.length, 600); +} + +// Test: multiple extra lines across chunk boundaries +{ + const expected = []; + for (let i = 0; i < 1200; i++) expected.push('line_' + i); + + const actual = []; + for (let i = 0; i < 1200; i++) { + if (i === 100) { + actual.push('EXTRA_A'); + actual.push('EXTRA_B'); + } + actual.push('line_' + i); + } + + const result = myersDiff(actual, expected); + const ops = diffToForwardOps(result); + + const inserts = ops.filter((o) => o.op === 1); + const deletes = ops.filter((o) => o.op === -1); + + assert.strictEqual(inserts.length, 2); + assert.strictEqual(inserts[0].value, 'EXTRA_A'); + assert.strictEqual(inserts[1].value, 'EXTRA_B'); + assert.strictEqual(deletes.length, 0, 'should produce no phantom DELETEs'); +} + +// Test: large identical inputs produce all NOPs +{ + const lines = []; + for (let i = 0; i < 600; i++) lines.push('line_' + i); + + const result = myersDiff(lines, lines); + const ops = diffToForwardOps(result); + + assert.strictEqual(ops.length, 600); + assert.ok(ops.every((o) => o.op === 0)); +} + +// Test: one side much longer than the other +// Diff should be correct (rebuild both sides from ops matches originals) +{ + const actual = []; + for (let i = 0; i < 700; i++) actual.push('line_' + i); + + const expected = []; + for (let i = 0; i < 200; i++) expected.push('line_' + i); + + const result = myersDiff(actual, expected); + const ops = diffToForwardOps(result); + + // Verify correctness: rebuild both sides from diff ops + const { rebuiltActual, rebuiltExpected } = rebuildFromOps(ops); + assert.deepStrictEqual(rebuiltActual, actual); + assert.deepStrictEqual(rebuiltExpected, expected); + + // Chunked diff with fallback boundaries may not find all 200 shared lines, + // but the diff must still be correct (rebuilds match originals). 
+ const nops = ops.filter((o) => o.op === 0); + const inserts = ops.filter((o) => o.op === 1); + const deletes = ops.filter((o) => o.op === -1); + assert.ok(nops.length >= 146, `expected at least 146 NOPs, got ${nops.length}`); + assert.strictEqual(nops.length + inserts.length, actual.length); + assert.strictEqual(nops.length + deletes.length, expected.length); +} + +// Test: expected side longer (deletions) +{ + const actual = []; + for (let i = 0; i < 200; i++) actual.push('line_' + i); + + const expected = []; + for (let i = 0; i < 700; i++) expected.push('line_' + i); + + const result = myersDiff(actual, expected); + const ops = diffToForwardOps(result); + + const { rebuiltActual, rebuiltExpected } = rebuildFromOps(ops); + assert.deepStrictEqual(rebuiltActual, actual); + assert.deepStrictEqual(rebuiltExpected, expected); + + const nops = ops.filter((o) => o.op === 0); + const inserts = ops.filter((o) => o.op === 1); + const deletes = ops.filter((o) => o.op === -1); + assert.ok(nops.length >= 146, `expected at least 146 NOPs, got ${nops.length}`); + assert.strictEqual(nops.length + inserts.length, actual.length); + assert.strictEqual(nops.length + deletes.length, expected.length); +} + +// Test: no matching anchor available (all unique lines) +// Should gracefully fall back and still produce a valid diff +{ + const actual = []; + const expected = []; + for (let i = 0; i < 600; i++) { + actual.push('actual_unique_' + i); + expected.push('expected_unique_' + i); + } + + const result = myersDiff(actual, expected); + const ops = diffToForwardOps(result); + + const inserts = ops.filter((o) => o.op === 1); + const deletes = ops.filter((o) => o.op === -1); + + // All lines are different, so we should get 600 inserts and 600 deletes + assert.strictEqual(inserts.length, 600); + assert.strictEqual(deletes.length, 600); +} + +function diffToForwardOps(diff) { + const ops = []; + for (let i = diff.length - 1; i >= 0; i--) { + ops.push({ op: diff[i][0], value: diff[i][1] }); + } + return ops; +} + +function rebuildFromOps(ops) { + const rebuiltActual = []; + const rebuiltExpected = []; + for (let i = 0; i < ops.length; i++) { + if (ops[i].op === 0) { + rebuiltActual.push(ops[i].value); + rebuiltExpected.push(ops[i].value); + } else if (ops[i].op === 1) { + rebuiltActual.push(ops[i].value); + } else { + rebuiltExpected.push(ops[i].value); + } + } + return { rebuiltActual, rebuiltExpected }; +} + +function createAlignedTestArrays({ lineCount, extraLineAt } = {}) { + const expected = []; + for (let i = 0; i < lineCount; i++) expected.push('line_' + i); + + const actual = []; + for (let i = 0; i < lineCount; i++) { + if (i === extraLineAt) actual.push('EXTRA_' + extraLineAt); + actual.push('line_' + i); + } + + return { actual, expected }; +}