diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8d2e0cc..d2696a7 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,6 @@
 ## Change log
 ----------------------
+- v3.2.1 - improve complex json array support with support for inconsistent schemas (hdwatts)
 - v3.2.0 - fix value 0 from being omitted
 - v3.0.1 - fix column values with zero (0) are being replaced with "" (sregger)
 - v3.0.0 - Promise API & fillTopRow
diff --git a/dist/parser/csv.js b/dist/parser/csv.js
index 09bb988..e5693dd 100644
--- a/dist/parser/csv.js
+++ b/dist/parser/csv.js
@@ -84,6 +84,7 @@ var Parser = function () {
     value: function _parseArray(json, stream) {
       var self = this;
       this._headers = this._headers || [];
+      var normalizedHeaders = [];
       var fileRows = [];
       var outputFile = void 0;
       var fillRows = void 0;
@@ -97,6 +98,15 @@ var Parser = function () {
         return index;
       };
 
+      var getNormalizedIndex = function getNormalizedIndex(header) {
+        var index = normalizedHeaders.indexOf(header);
+        if (index === -1) {
+          normalizedHeaders.push(header);
+          index = normalizedHeaders.indexOf(header);
+        }
+        return index;
+      };
+
       //Generate the csv output
       fillRows = function fillRows(result) {
         var rows = [];
@@ -111,6 +121,7 @@ var Parser = function () {
         };
         var emptyRowIndexByHeader = {};
         var currentRow = newRow();
+        var lastIndex = -1;
         var _iteratorNormalCompletion2 = true;
         var _didIteratorError2 = false;
         var _iteratorError2 = undefined;
@@ -120,11 +131,13 @@ var Parser = function () {
             var element = _step2.value;
 
             var elementHeaderIndex = getHeaderIndex(element.item);
-            if (currentRow[elementHeaderIndex] != undefined) {
+            var normalizedIndex = getNormalizedIndex(element.item);
+            if (currentRow[elementHeaderIndex] != undefined || normalizedIndex < lastIndex) {
               fillAndPush(currentRow);
               currentRow = newRow();
             }
             emptyRowIndexByHeader[elementHeaderIndex] = emptyRowIndexByHeader[elementHeaderIndex] || 0;
+            lastIndex = normalizedIndex;
             // make sure there isn't a empty row for this header
             if (self._options.fillTopRow && emptyRowIndexByHeader[elementHeaderIndex] < rows.length) {
               rows[emptyRowIndexByHeader[elementHeaderIndex]][elementHeaderIndex] = self._escape(element.value);
diff --git a/dist/parser/handler.js b/dist/parser/handler.js
index c0fb569..f0f61e4 100644
--- a/dist/parser/handler.js
+++ b/dist/parser/handler.js
@@ -17,6 +17,7 @@ var Handler = function () {
 
     // an object of {typeName:(value,index,parent)=>any}
     this._options.typeHandlers = this._options.typeHandlers || {};
+    this._headers = [];
   }
 
   /**
@@ -148,6 +149,22 @@ var Handler = function () {
       var self = this;
       var result = [];
      var firstElementWithoutItem;
+
+      var getHeaderIndex = function getHeaderIndex(item) {
+        var index = self._headers.indexOf(item);
+        if (index === -1) {
+          if (item === null) {
+            self._headers.unshift(item);
+          } else {
+            self._headers.push(item);
+          }
+          index = self._headers.indexOf(item);
+        }
+        return index;
+      };
+      var sortByHeaders = function sortByHeaders(itemA, itemB) {
+        return getHeaderIndex(itemA.item) - getHeaderIndex(itemB.item);
+      };
       for (var aIndex = 0; aIndex < array.length; ++aIndex) {
         var element = array[aIndex];
         //Check the propData type
@@ -161,6 +178,27 @@ var Handler = function () {
         } else if (resultCheckType.length > 0 && !firstResult.item && firstElementWithoutItem === undefined) {
           firstElementWithoutItem = firstResult;
         }
+        var toSort = [];
+        for (var bIndex = 0; bIndex < resultCheckType.length; bIndex++) {
+          getHeaderIndex(resultCheckType[bIndex].item);
+          resultCheckType[bIndex]._depth = (resultCheckType[bIndex]._depth || 0) + 1;
+          if (resultCheckType[bIndex]._depth === 1) {
+            toSort.push(resultCheckType[bIndex]);
+          } else if (toSort.length > 0) {
+            var sorted = toSort.sort(sortByHeaders);
+            for (var cIndex = 0; cIndex < sorted.length; cIndex++) {
+              resultCheckType[bIndex - sorted.length + cIndex] = sorted[cIndex];
+            }
+            toSort = [];
+          }
+        }
+        if (toSort.length > 0) {
+          var _sorted = toSort.sort(sortByHeaders);
+          for (var _cIndex = 0; _cIndex < _sorted.length; _cIndex++) {
+            resultCheckType[resultCheckType.length - _sorted.length + _cIndex] = _sorted[_cIndex];
+          }
+          toSort = [];
+        }
         //Append to results
         result = result.concat(resultCheckType);
       }
diff --git a/lib/parser/csv.js b/lib/parser/csv.js
index 3d8663a..c5ab419 100644
--- a/lib/parser/csv.js
+++ b/lib/parser/csv.js
@@ -70,6 +70,7 @@ class Parser {
   _parseArray(json, stream) {
     let self = this;
     this._headers = this._headers || [];
+    let normalizedHeaders = []
     let fileRows = [];
     let outputFile;
     let fillRows;
@@ -83,6 +84,15 @@ class Parser {
       return index;
     };
 
+    let getNormalizedIndex = function(header) {
+      var index = normalizedHeaders.indexOf(header)
+      if (index === -1) {
+        normalizedHeaders.push(header)
+        index = normalizedHeaders.indexOf(header)
+      }
+      return index
+    }
+
     //Generate the csv output
     fillRows = function(result) {
       const rows = [];
@@ -91,14 +101,21 @@ class Parser {
       const newRow = () => new Array(self._headers.length).fill(null);
       const emptyRowIndexByHeader = {};
       let currentRow = newRow();
+      let lastIndex = -1
       for (let element of result) {
         let elementHeaderIndex = getHeaderIndex(element.item);
-        if (currentRow[elementHeaderIndex] != undefined) {
+        let normalizedIndex = getNormalizedIndex(element.item)
+        if (
+          currentRow[elementHeaderIndex] != undefined ||
+          normalizedIndex < lastIndex
+        ) {
           fillAndPush(currentRow);
           currentRow = newRow();
         }
         emptyRowIndexByHeader[elementHeaderIndex] = emptyRowIndexByHeader[elementHeaderIndex] || 0;
+        lastIndex = normalizedIndex;
         // make sure there isn't a empty row for this header
+
         if (self._options.fillTopRow && emptyRowIndexByHeader[elementHeaderIndex] < rows.length) {
           rows[emptyRowIndexByHeader[elementHeaderIndex]][elementHeaderIndex] = self._escape(element.value);
           emptyRowIndexByHeader[elementHeaderIndex] += 1;
diff --git a/lib/parser/handler.js b/lib/parser/handler.js
index 2b9288a..5f8d266 100644
--- a/lib/parser/handler.js
+++ b/lib/parser/handler.js
@@ -9,6 +9,7 @@ class Handler {
 
     // an object of {typeName:(value,index,parent)=>any}
     this._options.typeHandlers = this._options.typeHandlers || {};
+    this._headers = []
   }
 
   /**
@@ -126,6 +127,22 @@ class Handler {
     let self = this;
     let result = [];
     var firstElementWithoutItem;
+
+    const getHeaderIndex = function(item) {
+      let index = self._headers.indexOf(item);
+      if (index === -1) {
+        if (item === null) {
+          self._headers.unshift(item);
+        } else {
+          self._headers.push(item);
+        }
+        index = self._headers.indexOf(item);
+      }
+      return index
+    }
+    const sortByHeaders = function(itemA, itemB) {
+      return getHeaderIndex(itemA.item) - getHeaderIndex(itemB.item);
+    }
     for (let aIndex=0; aIndex < array.length; ++aIndex) {
       let element = array[aIndex];
       //Check the propData type
@@ -139,6 +156,29 @@ class Handler {
       } else if (resultCheckType.length > 0 && !firstResult.item && firstElementWithoutItem === undefined) {
         firstElementWithoutItem = firstResult;
       }
+      let toSort = []
+      for (let bIndex=0; bIndex < resultCheckType.length; bIndex++) {
+        getHeaderIndex(resultCheckType[bIndex].item);
+        resultCheckType[bIndex]._depth = (resultCheckType[bIndex]._depth || 0) + 1
+        if (resultCheckType[bIndex]._depth === 1) {
+          toSort.push(resultCheckType[bIndex]);
+        } else if (toSort.length > 0) {
+          const sorted = toSort.sort(sortByHeaders)
+          for (let cIndex = 0; cIndex < sorted.length; cIndex++) {
+            resultCheckType[bIndex - sorted.length + cIndex] =
+              sorted[cIndex];
+          }
+          toSort = []
+        }
+      }
+      if (toSort.length > 0) {
+        const sorted = toSort.sort(sortByHeaders);
+        for (let cIndex = 0; cIndex < sorted.length; cIndex++) {
+          resultCheckType[resultCheckType.length - sorted.length + cIndex] =
+            sorted[cIndex];
+        }
+        toSort = [];
+      }
       //Append to results
       result = result.concat(resultCheckType);
     }
diff --git a/tests/array.js b/tests/array.js
index 64669ed..3fe410a 100644
--- a/tests/array.js
+++ b/tests/array.js
@@ -96,4 +96,191 @@ describe('Array', () => {
     assert.equal(csv, `a,b,c.a,c.b${os.EOL},b,a1,b1${os.EOL},,a2,b2${os.EOL},,,b3${os.EOL},,a4,b4`)
   });
+
+  it("with nested arrays & missing items in schema", async () => {
+    const csv = await jsonexport([
+      {
+        a: {
+          b: true,
+          c: [
+            {
+              d: 1,
+              h: 1,
+            },
+            {
+              h: 2,
+            },
+            {
+              d: 3,
+              h: 3,
+            },
+          ],
+        },
+      },
+    ]);
+
+    assert.equal(
+      csv,
+      `a.b,a.c.d,a.c.h${os.EOL}true,1,1${os.EOL},,2${os.EOL},3,3`
+    );
+  });
+
+  it("with nested arrays & out of order schema", async () => {
+    const csv = await jsonexport([
+      {
+        a: {
+          b: true,
+          c: [
+            {
+              d: 1,
+              h: 1,
+            },
+            {
+              h: 5,
+              d: 4,
+            },
+            {
+              d: 3,
+              h: 3,
+            },
+          ],
+        },
+      },
+    ]);
+
+    assert.equal(
+      csv,
+      `a.b,a.c.d,a.c.h${os.EOL}true,1,1${os.EOL},4,5${os.EOL},3,3`
+    );
+  });
+
+
+
+  it("with nested arrays & complex json schema", async () => {
+    const csv = await jsonexport([
+      {
+        a: {
+          b: [
+            {
+              c: [
+                {
+                  d: {
+                    name: "Name 1",
+                    f: {
+                      g: [
+                        {
+                          h: 1,
+                          i: 2,
+                          j: 3,
+                        },
+                        {
+                          h: 4,
+                          i: 5,
+                          j: 6,
+                        },
+                        {
+                          h: 7,
+                          i: 8,
+                          j: 9,
+                        },
+                        {
+                          h: 10,
+                          i: 11,
+                          j: 12,
+                        },
+                      ],
+                    },
+                  },
+                },
+                {
+                  d: {
+                    name: "Name 2",
+                    f: {
+                      g: [
+                        {
+                          h: 13,
+                          i: 14,
+                          j: 15,
+                        },
+                      ],
+                    },
+                  },
+                },
+              ],
+            },
+          ],
+        },
+      },
+    ]);
+    console.log(csv)
+    assert.equal(
+      csv,
+      `a.b.c.d.name,a.b.c.d.f.g.h,a.b.c.d.f.g.i,a.b.c.d.f.g.j${os.EOL}Name 1,1,2,3${os.EOL},4,5,6${os.EOL},7,8,9${os.EOL},10,11,12${os.EOL}Name 2,13,14,15`
+    );
+  })
+
+  it("with nested arrays & complex json schema & inconsistent items", async () => {
+    const csv = await jsonexport([
+      {
+        a: {
+          b: [
+            {
+              c: [
+                {
+                  d: {
+                    name: "Name 1",
+                    date: "2020-03-31",
+                    f: {
+                      g: [
+                        {
+                          h: 1,
+                          i: 2,
+                          j: 3,
+                        },
+                        {
+                          h: 4,
+                          i: 5,
+                          j: 6,
+                        },
+                        {
+                          h: 7,
+                          i: 8,
+                          j: 9,
+                        },
+                        {
+                          h: 10,
+                          i: 11,
+                          j: 12,
+                        },
+                      ],
+                    },
+                  },
+                },
+                {
+                  d: {
+                    date: "2020-06-30",
+                    name: "Name 2",
+                    f: {
+                      g: [
+                        {
+                          h: 13,
+                          i: 14,
+                          j: 15,
+                        },
+                      ],
+                    },
+                  },
+                },
+              ],
+            },
+          ],
+        },
+      },
+    ]);
+    assert.equal(
+      csv,
+      `a.b.c.d.name,a.b.c.d.date,a.b.c.d.f.g.h,a.b.c.d.f.g.i,a.b.c.d.f.g.j${os.EOL}Name 1,2020-03-31,1,2,3${os.EOL},,4,5,6${os.EOL},,7,8,9${os.EOL},,10,11,12${os.EOL}Name 2,2020-06-30,13,14,15`
+    );
+  });
+
 
 });
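For reviewers, a minimal usage sketch (illustrative only, not part of the patch) of the behaviour the normalized-header tracking enables. It mirrors the new "missing items in schema" test and assumes the package is required locally as jsonexport, using its Promise API:

const jsonexport = require('jsonexport');

// The second array item omits "d"; with this change the rows still line up
// under a stable a.c.d column, as asserted by the new test.
jsonexport([
  { a: { b: true, c: [{ d: 1, h: 1 }, { h: 2 }, { d: 3, h: 3 }] } }
]).then(csv => {
  console.log(csv);
  // a.b,a.c.d,a.c.h
  // true,1,1
  // ,,2
  // ,3,3
});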