diff --git a/.github/workflows/upload-airtable.yml b/.github/workflows/upload-airtable.yml new file mode 100644 index 0000000..75a89c0 --- /dev/null +++ b/.github/workflows/upload-airtable.yml @@ -0,0 +1,28 @@ +name: Upload to Airtable + +on: + push: + branches: + - master + +jobs: + csv: + name: Upload CSV to Airtable + runs-on: ubuntu-latest + steps: + - name: Check out code + uses: actions/checkout@v3 + - name: Set up Node + uses: actions/setup-node@v4 + with: + node-version: '20' + - name: Install Node.js dependencies + run: npm ci + - name: Create templates.csv + run: node bin/csv.js + - name: Upload to Airtable + run: | + curl -X POST ${{ secrets.AIRTABLE_WEBHOOKS_API_ENDPOINT }} \ + -H "Authorization: Bearer ${{ secrets.AIRTABLE_WEBHOOKS_API_KEY }}" \ + -H "Content-Type: text/csv" \ + --data-binary "@bin/templates.csv" diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index b9014d1..21264a2 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -23,25 +23,4 @@ jobs: cd schema go build -o ../bin/validate ./cmd/validate.go - name: Validate webhook templates - run: ./bin/validate - csv: - if: github.ref == 'refs/heads/master' - name: Upload CSV to Airtable - runs-on: ubuntu-latest - steps: - - name: Check out code - uses: actions/checkout@v3 - - name: Set up Node - uses: actions/setup-node@v4 - with: - node-version: '20' - - name: Install Node.js dependencies - run: npm ci - - name: Create templates.csv - run: node bin/csv.js - - name: Upload to Airtable - run: | - curl -X POST ${{ secrets.AIRTABLE_WEBHOOKS_API_ENDPOINT }} \ - -H "Authorization: Bearer ${{ secrets.AIRTABLE_WEBHOOKS_API_KEY }}" \ - -H "Content-Type: text/csv" \ - --data-binary "@bin/templates.csv" + run: ./bin/validate \ No newline at end of file diff --git a/.gitignore b/.gitignore index 0d6a8f3..5834d43 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,2 @@ -bin/templates.csv \ No newline at end of file +bin/templates.csv +bin/node_modules \ No newline at end of file diff --git a/bin/node_modules/.bin/yaml b/bin/node_modules/.bin/yaml deleted file mode 120000 index 0368324..0000000 --- a/bin/node_modules/.bin/yaml +++ /dev/null @@ -1 +0,0 @@ -../yaml/bin.mjs \ No newline at end of file diff --git a/bin/node_modules/.package-lock.json b/bin/node_modules/.package-lock.json deleted file mode 100644 index fd21a2d..0000000 --- a/bin/node_modules/.package-lock.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "name": "bin", - "lockfileVersion": 2, - "requires": true, - "packages": { - "node_modules/csv-writer": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/csv-writer/-/csv-writer-1.6.0.tgz", - "integrity": "sha512-NOx7YDFWEsM/fTRAJjRpPp8t+MKRVvniAg9wQlUKx20MFrPs73WLJhFf5iteqrxNYnsy924K3Iroh3yNHeYd2g==" - }, - "node_modules/fs": { - "version": "0.0.1-security", - "resolved": "https://registry.npmjs.org/fs/-/fs-0.0.1-security.tgz", - "integrity": "sha512-3XY9e1pP0CVEUCdj5BmfIZxRBTSDycnbqhIOGec9QYtmVH2fbLpj86CFWkrNOkt/Fvty4KZG5lTglL9j/gJ87w==" - }, - "node_modules/inherits": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "integrity": "sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw==" - }, - "node_modules/path": { - "version": "0.12.7", - "resolved": "https://registry.npmjs.org/path/-/path-0.12.7.tgz", - "integrity": "sha512-aXXC6s+1w7otVF9UletFkFcDsJeO7lSZBPUQhtb5O0xJe8LtYhj/GxldoL09bBj9+ZmE2hNoHqQSFMN5fikh4Q==", - "dependencies": { - "process": "^0.11.1", - 
"util": "^0.10.3" - } - }, - "node_modules/process": { - "version": "0.11.10", - "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", - "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", - "engines": { - "node": ">= 0.6.0" - } - }, - "node_modules/util": { - "version": "0.10.4", - "resolved": "https://registry.npmjs.org/util/-/util-0.10.4.tgz", - "integrity": "sha512-0Pm9hTQ3se5ll1XihRic3FDIku70C+iHUdT/W926rSgHV5QgXsYbKZN8MSC3tJtSkhuROzvsQjAaFENRXr+19A==", - "dependencies": { - "inherits": "2.0.3" - } - }, - "node_modules/yaml": { - "version": "2.4.3", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.4.3.tgz", - "integrity": "sha512-sntgmxj8o7DE7g/Qi60cqpLBA3HG3STcDA0kO+WfB05jEKhZMbY7umNm2rBpQvsmZ16/lPXCJGW2672dgOUkrg==", - "bin": { - "yaml": "bin.mjs" - }, - "engines": { - "node": ">= 14" - } - } - } -} diff --git a/bin/node_modules/csv-writer/CHANGELOG.md b/bin/node_modules/csv-writer/CHANGELOG.md deleted file mode 100644 index 08629d6..0000000 --- a/bin/node_modules/csv-writer/CHANGELOG.md +++ /dev/null @@ -1,58 +0,0 @@ -# Changelog - -All notable changes to this project will be documented in this file. - -The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) -and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html). - -## [1.6.0] - 2020-01-18 -### Added -- Support for specifying values in nested objects. [#34](https://github.com/ryu1kn/csv-writer/pull/34) - -## [1.5.0] - 2019-07-13 -### Added -- Added `alwaysQuote` flag to always double-quote all fields. [#21](https://github.com/ryu1kn/csv-writer/pull/21) - -## [1.4.0] - 2019-06-19 -### Added -- Allow CRLF as a record delimiter. [#27](https://github.com/ryu1kn/csv-writer/pull/27) - -## [1.3.0] - 2019-04-19 -### Changed -- Changed project language from JavaScript to TypeScript. - -### Added -- Made TypeScript type definitions accessible. Thanks to @coyotte508. - [PR #23](https://github.com/ryu1kn/csv-writer/pull/23) - -## [1.2.0] - 2018-08-22 -### Added -- CSV records are now not limited to an array but can be an iterable object. Thanks to @pineapplemachine. - [PR #11](https://github.com/ryu1kn/csv-writer/pull/11) - -## [1.1.0] - 2018-08-20 -### Added -- Allow semicolon as a field delimiter as it is commonly used in CSV in some regions. Thanks to @HKskn. - [PR #8](https://github.com/ryu1kn/csv-writer/pull/8), [#6](https://github.com/ryu1kn/csv-writer/pull/6) - -## [1.0.1] - 2018-08-09 -### Fixed -- Fixed the issue that coverage report badge on README shows question mark. - Use Coveralls instead of CodeClimate to get code coverage. - -## [1.0.0] - 2018-02-28 -### Added -- Support for adding CSV records to already existing files. Thanks to @jonmelcher. 
[PR #4](https://github.com/ryu1kn/csv-writer/pull/4) - -## [0.0.3] - 2016-11-09 -### Fixed -- Fixed the bug that fields were not always surrounded by double quotes -- Fixed the bug that white space characters on the edge of fields were trimmed - -## [0.0.2] - 2016-10-15 -### Fixed -- Fixed the bug that field values were not quoted when they have newline characters - -## [0.0.1] - 2016-09-09 -### Added -- Initial release of csv-writer diff --git a/bin/node_modules/csv-writer/README.md b/bin/node_modules/csv-writer/README.md deleted file mode 100644 index 6fc8580..0000000 --- a/bin/node_modules/csv-writer/README.md +++ /dev/null @@ -1,328 +0,0 @@ -[![Build Status](https://travis-ci.org/ryu1kn/csv-writer.svg?branch=master)](https://travis-ci.org/ryu1kn/csv-writer) -[![Coverage Status](https://coveralls.io/repos/github/ryu1kn/csv-writer/badge.svg?branch=master)](https://coveralls.io/github/ryu1kn/csv-writer?branch=master) -[![Code Climate](https://codeclimate.com/github/ryu1kn/csv-writer/badges/gpa.svg)](https://codeclimate.com/github/ryu1kn/csv-writer) - -# CSV Writer - -Convert objects/arrays into a CSV string or write them into a file. -It respects [RFC 4180](https://tools.ietf.org/html/rfc4180) for the output CSV format. - -## Prerequisite - -* Node version 4 or above - -## Usage - -The example below shows how you can write records defined as the array of objects into a file. - -```js -const createCsvWriter = require('csv-writer').createObjectCsvWriter; -const csvWriter = createCsvWriter({ - path: 'path/to/file.csv', - header: [ - {id: 'name', title: 'NAME'}, - {id: 'lang', title: 'LANGUAGE'} - ] -}); - -const records = [ - {name: 'Bob', lang: 'French, English'}, - {name: 'Mary', lang: 'English'} -]; - -csvWriter.writeRecords(records) // returns a promise - .then(() => { - console.log('...Done'); - }); - -// This will produce a file path/to/file.csv with following contents: -// -// NAME,LANGUAGE -// Bob,"French, English" -// Mary,English -``` - -You can keep writing records into the same file by calling `writeRecords` multiple times -(but need to wait for the fulfillment of the `promise` of the previous `writeRecords` call). - -```js -// In an `async` function -await csvWriter.writeRecords(records1) -await csvWriter.writeRecords(records2) -... -``` - -However, if you need to keep writing large data to a certain file, you would want to create -node's transform stream and use `CsvStringifier`, which is explained later, inside it -, and pipe the stream into a file write stream. - -If you don't want to write a header line, don't give `title` to header elements and just give field IDs as a string. - -```js -const createCsvWriter = require('csv-writer').createObjectCsvWriter; -const csvWriter = createCsvWriter({ - path: 'path/to/file.csv', - header: ['name', 'lang'] -}); -``` - -If each record is defined as an array, use `createArrayCsvWriter` to get an `csvWriter`. 
- -```js -const createCsvWriter = require('csv-writer').createArrayCsvWriter; -const csvWriter = createCsvWriter({ - header: ['NAME', 'LANGUAGE'], - path: 'path/to/file.csv' -}); - -const records = [ - ['Bob', 'French, English'], - ['Mary', 'English'] -]; - -csvWriter.writeRecords(records) // returns a promise - .then(() => { - console.log('...Done'); - }); - -// This will produce a file path/to/file.csv with following contents: -// -// NAME,LANGUAGE -// Bob,"French, English" -// Mary,English -``` - -If you just want to get a CSV string but don't want to write into a file, -you can use `createObjectCsvStringifier` (or `createArrayCsvStringifier`) -to get an `csvStringifier`. - -```js -const createCsvStringifier = require('csv-writer').createObjectCsvStringifier; -const csvStringifier = createCsvStringifier({ - header: [ - {id: 'name', title: 'NAME'}, - {id: 'lang', title: 'LANGUAGE'} - ] -}); - -const records = [ - {name: 'Bob', lang: 'French, English'}, - {name: 'Mary', lang: 'English'} -]; - -console.log(csvStringifier.getHeaderString()); -// => 'NAME,LANGUAGE\n' - -console.log(csvStringifier.stringifyRecords(records)); -// => 'Bob,"French, English"\nMary,English\n' -``` - - -## API - -### createObjectCsvWriter(params) - -##### Parameters: - -* params `` - * path `` - - Path to a write file - - * header `>` - - Array of objects (`id` and `title` properties) or strings (field IDs). - A header line will be written to the file only if given as an array of objects. - - * fieldDelimiter `` (optional) - - Default: `,`. Only either comma `,` or semicolon `;` is allowed. - - * recordDelimiter `` (optional) - - Default: `\n`. Only either LF (`\n`) or CRLF (`\r\n`) is allowed. - - * headerIdDelimiter `` (optional) - - Default: `undefined`. Give this value to specify a path to a value in a nested object. - - * alwaysQuote `` (optional) - - Default: `false`. Set it to `true` to double-quote all fields regardless of their values. - - * encoding `` (optional) - - Default: `utf8`. - - * append `` (optional) - - Default: `false`. When `true`, it will append CSV records to the specified file. - If the file doesn't exist, it will create one. - - **NOTE:** A header line will not be written to the file if `true` is given. - -##### Returns: - -* `` - - -### createArrayCsvWriter(params) - -##### Parameters: - -* params `` - * path `` - - Path to a write file - - * header `>` (optional) - - Array of field titles - - * fieldDelimiter `` (optional) - - Default: `,`. Only either comma `,` or semicolon `;` is allowed. - - * recordDelimiter `` (optional) - - Default: `\n`. Only either LF (`\n`) or CRLF (`\r\n`) is allowed. - - * alwaysQuote `` (optional) - - Default: `false`. Set it to `true` to double-quote all fields regardless of their values. - - * encoding `` (optional) - - Default: `utf8`. - - * append `` (optional) - - Default: `false`. When `true`, it will append CSV records to the specified file. - If the file doesn't exist, it will create one. - - **NOTE:** A header line will not be written to the file if `true` is given. - -##### Returns: - -* `` - - -### CsvWriter#writeRecords(records) - -##### Parameters: - -* records `>` - - Depending on which function was used to create a `csvWriter` (i.e. `createObjectCsvWriter` or `createArrayCsvWriter`), - records will be either a collection of objects or arrays. As long as the collection is iterable, it doesn't need to be an array. 
- -##### Returns: - -* `` - - -### createObjectCsvStringifier(params) - -##### Parameters: - -* params `` - * header `>` - - Array of objects (`id` and `title` properties) or strings (field IDs) - - * fieldDelimiter `` (optional) - - Default: `,`. Only either comma `,` or semicolon `;` is allowed. - - * recordDelimiter `` (optional) - - Default: `\n`. Only either LF (`\n`) or CRLF (`\r\n`) is allowed. - - * headerIdDelimiter `` (optional) - - Default: `undefined`. Give this value to specify a path to a value in a nested object. - - * alwaysQuote `` (optional) - - Default: `false`. Set it to `true` to double-quote all fields regardless of their values. - -##### Returns: - -* `` - -### ObjectCsvStringifier#getHeaderString() - -##### Returns: - -* `` - -### ObjectCsvStringifier#stringifyRecords(records) - -##### Parameters: - -* records `>` - -##### Returns: - -* `` - -### createArrayCsvStringifier(params) - -##### Parameters: - -* params `` - * header `>` (optional) - - Array of field titles - - * fieldDelimiter `` (optional) - - Default: `,`. Only either comma `,` or semicolon `;` is allowed. - - * recordDelimiter `` (optional) - - Default: `\n`. Only either LF (`\n`) or CRLF (`\r\n`) is allowed. - - * alwaysQuote `` (optional) - - Default: `false`. Set it to `true` to double-quote all fields regardless of their values. - -##### Returns: - -* `` - -### ArrayCsvStringifier#getHeaderString() - -##### Returns: - -* `` - -### ArrayCsvStringifier#stringifyRecords(records) - -##### Parameters: - -* records `>>` - -##### Returns: - -* `` - - -## Request Features or Report Bugs - -Feature requests and bug reports are very welcome: https://github.com/ryu1kn/csv-writer/issues - -A couple of requests from me when you raise an issue on GitHub. - -* **Requesting a feature:** Please try to provide the context of why you want the feature. Such as, - in what situation the feature could help you and how, or how the lack of the feature is causing an inconvenience to you. - I can't start thinking of introducing it until I understand how it helps you 🙂 -* **Reporting a bug:** If you could provide a runnable code snippet that reproduces the bug, it would be very helpful! 
- - -## Development - -### Prerequisite - -* Node version 8 or above -* Docker diff --git a/bin/node_modules/csv-writer/dist/index.js b/bin/node_modules/csv-writer/dist/index.js deleted file mode 100644 index 2c877d1..0000000 --- a/bin/node_modules/csv-writer/dist/index.js +++ /dev/null @@ -1,19 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -var csv_stringifier_factory_1 = require("./lib/csv-stringifier-factory"); -var csv_writer_factory_1 = require("./lib/csv-writer-factory"); -var csvStringifierFactory = new csv_stringifier_factory_1.CsvStringifierFactory(); -var csvWriterFactory = new csv_writer_factory_1.CsvWriterFactory(csvStringifierFactory); -exports.createArrayCsvStringifier = function (params) { - return csvStringifierFactory.createArrayCsvStringifier(params); -}; -exports.createObjectCsvStringifier = function (params) { - return csvStringifierFactory.createObjectCsvStringifier(params); -}; -exports.createArrayCsvWriter = function (params) { - return csvWriterFactory.createArrayCsvWriter(params); -}; -exports.createObjectCsvWriter = function (params) { - return csvWriterFactory.createObjectCsvWriter(params); -}; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/index.js.map b/bin/node_modules/csv-writer/dist/index.js.map deleted file mode 100644 index d14eb82..0000000 --- a/bin/node_modules/csv-writer/dist/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;AAAA,yEAIuC;AACvC,+DAAuG;AAEvG,IAAM,qBAAqB,GAAG,IAAI,+CAAqB,EAAE,CAAC;AAC1D,IAAM,gBAAgB,GAAG,IAAI,qCAAgB,CAAC,qBAAqB,CAAC,CAAC;AAExD,QAAA,yBAAyB,GAAG,UAAC,MAAiC;IACnE,OAAA,qBAAqB,CAAC,yBAAyB,CAAC,MAAM,CAAC;AAAvD,CAAuD,CAAC;AAEnD,QAAA,0BAA0B,GAAG,UAAC,MAAkC;IACrE,OAAA,qBAAqB,CAAC,0BAA0B,CAAC,MAAM,CAAC;AAAxD,CAAwD,CAAC;AAEpD,QAAA,oBAAoB,GAAG,UAAC,MAA4B;IACzD,OAAA,gBAAgB,CAAC,oBAAoB,CAAC,MAAM,CAAC;AAA7C,CAA6C,CAAC;AAEzC,QAAA,qBAAqB,GAAG,UAAC,MAA6B;IAC3D,OAAA,gBAAgB,CAAC,qBAAqB,CAAC,MAAM,CAAC;AAA9C,CAA8C,CAAC"} \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/lib/csv-stringifier-factory.js b/bin/node_modules/csv-writer/dist/lib/csv-stringifier-factory.js deleted file mode 100644 index 20c2fc9..0000000 --- a/bin/node_modules/csv-writer/dist/lib/csv-stringifier-factory.js +++ /dev/null @@ -1,20 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -var array_1 = require("./csv-stringifiers/array"); -var field_stringifier_1 = require("./field-stringifier"); -var object_1 = require("./csv-stringifiers/object"); -var CsvStringifierFactory = /** @class */ (function () { - function CsvStringifierFactory() { - } - CsvStringifierFactory.prototype.createArrayCsvStringifier = function (params) { - var fieldStringifier = field_stringifier_1.createFieldStringifier(params.fieldDelimiter, params.alwaysQuote); - return new array_1.ArrayCsvStringifier(fieldStringifier, params.recordDelimiter, params.header); - }; - CsvStringifierFactory.prototype.createObjectCsvStringifier = function (params) { - var fieldStringifier = field_stringifier_1.createFieldStringifier(params.fieldDelimiter, params.alwaysQuote); - return new object_1.ObjectCsvStringifier(fieldStringifier, params.header, params.recordDelimiter, params.headerIdDelimiter); - }; - return CsvStringifierFactory; -}()); -exports.CsvStringifierFactory = CsvStringifierFactory; -//# sourceMappingURL=csv-stringifier-factory.js.map \ No newline at end of 
file diff --git a/bin/node_modules/csv-writer/dist/lib/csv-stringifier-factory.js.map b/bin/node_modules/csv-writer/dist/lib/csv-stringifier-factory.js.map deleted file mode 100644 index a8ca3f2..0000000 --- a/bin/node_modules/csv-writer/dist/lib/csv-stringifier-factory.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"csv-stringifier-factory.js","sourceRoot":"","sources":["../../src/lib/csv-stringifier-factory.ts"],"names":[],"mappings":";;AAAA,kDAA6D;AAC7D,yDAA2D;AAC3D,oDAA+D;AAkB/D;IAAA;IAYA,CAAC;IAVG,yDAAyB,GAAzB,UAA0B,MAAiC;QACvD,IAAM,gBAAgB,GAAG,0CAAsB,CAAC,MAAM,CAAC,cAAc,EAAE,MAAM,CAAC,WAAW,CAAC,CAAC;QAC3F,OAAO,IAAI,2BAAmB,CAAC,gBAAgB,EAAE,MAAM,CAAC,eAAe,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC;IAC5F,CAAC;IAED,0DAA0B,GAA1B,UAA2B,MAAkC;QACzD,IAAM,gBAAgB,GAAG,0CAAsB,CAAC,MAAM,CAAC,cAAc,EAAE,MAAM,CAAC,WAAW,CAAC,CAAC;QAC3F,OAAO,IAAI,6BAAoB,CAAC,gBAAgB,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,eAAe,EAAE,MAAM,CAAC,iBAAiB,CAAC,CAAC;IACvH,CAAC;IAEL,4BAAC;AAAD,CAAC,AAZD,IAYC;AAZY,sDAAqB"} \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/lib/csv-stringifiers/abstract.js b/bin/node_modules/csv-writer/dist/lib/csv-stringifiers/abstract.js deleted file mode 100644 index dcade06..0000000 --- a/bin/node_modules/csv-writer/dist/lib/csv-stringifiers/abstract.js +++ /dev/null @@ -1,38 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -var DEFAULT_RECORD_DELIMITER = '\n'; -var VALID_RECORD_DELIMITERS = [DEFAULT_RECORD_DELIMITER, '\r\n']; -var CsvStringifier = /** @class */ (function () { - function CsvStringifier(fieldStringifier, recordDelimiter) { - if (recordDelimiter === void 0) { recordDelimiter = DEFAULT_RECORD_DELIMITER; } - this.fieldStringifier = fieldStringifier; - this.recordDelimiter = recordDelimiter; - _validateRecordDelimiter(recordDelimiter); - } - CsvStringifier.prototype.getHeaderString = function () { - var headerRecord = this.getHeaderRecord(); - return headerRecord ? 
this.joinRecords([this.getCsvLine(headerRecord)]) : null; - }; - CsvStringifier.prototype.stringifyRecords = function (records) { - var _this = this; - var csvLines = Array.from(records, function (record) { return _this.getCsvLine(_this.getRecordAsArray(record)); }); - return this.joinRecords(csvLines); - }; - CsvStringifier.prototype.getCsvLine = function (record) { - var _this = this; - return record - .map(function (fieldValue) { return _this.fieldStringifier.stringify(fieldValue); }) - .join(this.fieldStringifier.fieldDelimiter); - }; - CsvStringifier.prototype.joinRecords = function (records) { - return records.join(this.recordDelimiter) + this.recordDelimiter; - }; - return CsvStringifier; -}()); -exports.CsvStringifier = CsvStringifier; -function _validateRecordDelimiter(delimiter) { - if (VALID_RECORD_DELIMITERS.indexOf(delimiter) === -1) { - throw new Error("Invalid record delimiter `" + delimiter + "` is specified"); - } -} -//# sourceMappingURL=abstract.js.map \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/lib/csv-stringifiers/abstract.js.map b/bin/node_modules/csv-writer/dist/lib/csv-stringifiers/abstract.js.map deleted file mode 100644 index 6e5151b..0000000 --- a/bin/node_modules/csv-writer/dist/lib/csv-stringifiers/abstract.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"abstract.js","sourceRoot":"","sources":["../../../src/lib/csv-stringifiers/abstract.ts"],"names":[],"mappings":";;AAGA,IAAM,wBAAwB,GAAG,IAAI,CAAC;AACtC,IAAM,uBAAuB,GAAG,CAAC,wBAAwB,EAAE,MAAM,CAAC,CAAC;AAEnE;IAEI,wBAA6B,gBAAkC,EAClC,eAA0C;QAA1C,gCAAA,EAAA,0CAA0C;QAD1C,qBAAgB,GAAhB,gBAAgB,CAAkB;QAClC,oBAAe,GAAf,eAAe,CAA2B;QACnE,wBAAwB,CAAC,eAAe,CAAC,CAAC;IAC9C,CAAC;IAED,wCAAe,GAAf;QACI,IAAM,YAAY,GAAG,IAAI,CAAC,eAAe,EAAE,CAAC;QAC5C,OAAO,YAAY,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;IACnF,CAAC;IAED,yCAAgB,GAAhB,UAAiB,OAAkC;QAAnD,iBAGC;QAFG,IAAM,QAAQ,GAAG,KAAK,CAAC,IAAI,CAAC,OAAO,EAAE,UAAA,MAAM,IAAI,OAAA,KAAI,CAAC,UAAU,CAAC,KAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC,EAA9C,CAA8C,CAAC,CAAC;QAC/F,OAAO,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC;IACtC,CAAC;IAMO,mCAAU,GAAlB,UAAmB,MAAe;QAAlC,iBAIC;QAHG,OAAO,MAAM;aACR,GAAG,CAAC,UAAA,UAAU,IAAI,OAAA,KAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC,UAAU,CAAC,EAA3C,CAA2C,CAAC;aAC9D,IAAI,CAAC,IAAI,CAAC,gBAAgB,CAAC,cAAc,CAAC,CAAC;IACpD,CAAC;IAEO,oCAAW,GAAnB,UAAoB,OAAiB;QACjC,OAAO,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,eAAe,CAAC,GAAG,IAAI,CAAC,eAAe,CAAC;IACrE,CAAC;IACL,qBAAC;AAAD,CAAC,AA9BD,IA8BC;AA9BqB,wCAAc;AAgCpC,SAAS,wBAAwB,CAAC,SAAiB;IAC/C,IAAI,uBAAuB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE;QACnD,MAAM,IAAI,KAAK,CAAC,+BAA8B,SAAS,mBAAiB,CAAC,CAAC;KAC7E;AACL,CAAC"} \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/lib/csv-stringifiers/array.js b/bin/node_modules/csv-writer/dist/lib/csv-stringifiers/array.js deleted file mode 100644 index 5508a57..0000000 --- a/bin/node_modules/csv-writer/dist/lib/csv-stringifiers/array.js +++ /dev/null @@ -1,33 +0,0 @@ -"use strict"; -var __extends = (this && this.__extends) || (function () { - var extendStatics = function (d, b) { - extendStatics = Object.setPrototypeOf || - ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || - function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; - return extendStatics(d, b); - }; - return function (d, b) { - extendStatics(d, b); - function __() { this.constructor = d; } - d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); - }; -})(); -Object.defineProperty(exports, "__esModule", { value: true }); -var abstract_1 = require("./abstract"); -var ArrayCsvStringifier = /** @class */ (function (_super) { - __extends(ArrayCsvStringifier, _super); - function ArrayCsvStringifier(fieldStringifier, recordDelimiter, header) { - var _this = _super.call(this, fieldStringifier, recordDelimiter) || this; - _this.header = header; - return _this; - } - ArrayCsvStringifier.prototype.getHeaderRecord = function () { - return this.header; - }; - ArrayCsvStringifier.prototype.getRecordAsArray = function (record) { - return record; - }; - return ArrayCsvStringifier; -}(abstract_1.CsvStringifier)); -exports.ArrayCsvStringifier = ArrayCsvStringifier; -//# sourceMappingURL=array.js.map \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/lib/csv-stringifiers/array.js.map b/bin/node_modules/csv-writer/dist/lib/csv-stringifiers/array.js.map deleted file mode 100644 index 36fcb18..0000000 --- a/bin/node_modules/csv-writer/dist/lib/csv-stringifiers/array.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"array.js","sourceRoot":"","sources":["../../../src/lib/csv-stringifiers/array.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;AAAA,uCAA0C;AAI1C;IAAyC,uCAAuB;IAE5D,6BAAY,gBAAkC,EAClC,eAAwB,EACP,MAAiB;QAF9C,YAGI,kBAAM,gBAAgB,EAAE,eAAe,CAAC,SAC3C;QAF4B,YAAM,GAAN,MAAM,CAAW;;IAE9C,CAAC;IAES,6CAAe,GAAzB;QACI,OAAO,IAAI,CAAC,MAAM,CAAC;IACvB,CAAC;IAES,8CAAgB,GAA1B,UAA2B,MAAe;QACtC,OAAO,MAAM,CAAC;IAClB,CAAC;IACL,0BAAC;AAAD,CAAC,AAfD,CAAyC,yBAAc,GAetD;AAfY,kDAAmB"} \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/lib/csv-stringifiers/object.js b/bin/node_modules/csv-writer/dist/lib/csv-stringifiers/object.js deleted file mode 100644 index 4d74e01..0000000 --- a/bin/node_modules/csv-writer/dist/lib/csv-stringifiers/object.js +++ /dev/null @@ -1,57 +0,0 @@ -"use strict"; -var __extends = (this && this.__extends) || (function () { - var extendStatics = function (d, b) { - extendStatics = Object.setPrototypeOf || - ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || - function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; - return extendStatics(d, b); - }; - return function (d, b) { - extendStatics(d, b); - function __() { this.constructor = d; } - d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); - }; -})(); -Object.defineProperty(exports, "__esModule", { value: true }); -var abstract_1 = require("./abstract"); -var object_1 = require("../lang/object"); -var ObjectCsvStringifier = /** @class */ (function (_super) { - __extends(ObjectCsvStringifier, _super); - function ObjectCsvStringifier(fieldStringifier, header, recordDelimiter, headerIdDelimiter) { - var _this = _super.call(this, fieldStringifier, recordDelimiter) || this; - _this.header = header; - _this.headerIdDelimiter = headerIdDelimiter; - return _this; - } - ObjectCsvStringifier.prototype.getHeaderRecord = function () { - if (!this.isObjectHeader) - return null; - return this.header.map(function (field) { return field.title; }); - }; - ObjectCsvStringifier.prototype.getRecordAsArray = function (record) { - var _this = this; - return this.fieldIds.map(function (fieldId) { return _this.getNestedValue(record, fieldId); }); - }; - ObjectCsvStringifier.prototype.getNestedValue = function (obj, key) { - if (!this.headerIdDelimiter) - return obj[key]; - return key.split(this.headerIdDelimiter).reduce(function (subObj, keyPart) { return (subObj || {})[keyPart]; }, obj); - }; - Object.defineProperty(ObjectCsvStringifier.prototype, "fieldIds", { - get: function () { - return this.isObjectHeader ? this.header.map(function (column) { return column.id; }) : this.header; - }, - enumerable: true, - configurable: true - }); - Object.defineProperty(ObjectCsvStringifier.prototype, "isObjectHeader", { - get: function () { - return object_1.isObject(this.header && this.header[0]); - }, - enumerable: true, - configurable: true - }); - return ObjectCsvStringifier; -}(abstract_1.CsvStringifier)); -exports.ObjectCsvStringifier = ObjectCsvStringifier; -//# sourceMappingURL=object.js.map \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/lib/csv-stringifiers/object.js.map b/bin/node_modules/csv-writer/dist/lib/csv-stringifiers/object.js.map deleted file mode 100644 index 3eff4c6..0000000 --- a/bin/node_modules/csv-writer/dist/lib/csv-stringifiers/object.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"object.js","sourceRoot":"","sources":["../../../src/lib/csv-stringifiers/object.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;AAAA,uCAA0C;AAG1C,yCAAmD;AAEnD;IAA0C,wCAAgC;IAEtE,8BAAY,gBAAkC,EACjB,MAA+B,EAChD,eAAwB,EACP,iBAA0B;QAHvD,YAII,kBAAM,gBAAgB,EAAE,eAAe,CAAC,SAC3C;QAJ4B,YAAM,GAAN,MAAM,CAAyB;QAE/B,uBAAiB,GAAjB,iBAAiB,CAAS;;IAEvD,CAAC;IAES,8CAAe,GAAzB;QACI,IAAI,CAAC,IAAI,CAAC,cAAc;YAAE,OAAO,IAAI,CAAC;QACtC,OAAQ,IAAI,CAAC,MAA6B,CAAC,GAAG,CAAC,UAAA,KAAK,IAAI,OAAA,KAAK,CAAC,KAAK,EAAX,CAAW,CAAC,CAAC;IACzE,CAAC;IAES,+CAAgB,GAA1B,UAA2B,MAAwB;QAAnD,iBAEC;QADG,OAAO,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,UAAA,OAAO,IAAI,OAAA,KAAI,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,EAApC,CAAoC,CAAC,CAAC;IAC9E,CAAC;IAEO,6CAAc,GAAtB,UAAuB,GAAqB,EAAE,GAAW;QACrD,IAAI,CAAC,IAAI,CAAC,iBAAiB;YAAE,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC;QAC7C,OAAO,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC,MAAM,CAAC,UAAC,MAAM,EAAE,OAAO,IAAK,OAAA,CAAC,MAAM,IAAI,EAAE,CAAC,CAAC,OAAO,CAAC,EAAvB,CAAuB,EAAE,GAAG,CAAC,CAAC;IACvG,CAAC;IAED,sBAAY,0CAAQ;aAApB;YACI,OAAO,IAAI,CAAC,cAAc,CAAC,CAAC,CAAE,IAAI,CAAC,MAA6B,CAAC,GAAG,CAAC,UAAA,MAAM,IAAI,OAAA,MAAM,CAAC,EAAE,EAAT,CAAS,CAAC,CAAC,CAAC,CAAE,IAAI,CAAC,MAAmB,CAAC;QAC1H,CAAC;;;OAAA;IAED,sBAAY,gDAAc;aAA1B;YACI,OAAO,iBAAQ,CAAC,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;QACnD,CAAC;;;OAAA;IACL,2BAAC;AAAD,CAAC,AA9BD,CAA0C,yBAAc,GA8BvD;AA9BY,oDAAoB"} \ No newline at end of file 
diff --git a/bin/node_modules/csv-writer/dist/lib/csv-writer-factory.js b/bin/node_modules/csv-writer/dist/lib/csv-writer-factory.js deleted file mode 100644 index 6c0a091..0000000 --- a/bin/node_modules/csv-writer/dist/lib/csv-writer-factory.js +++ /dev/null @@ -1,30 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -var csv_writer_1 = require("./csv-writer"); -var CsvWriterFactory = /** @class */ (function () { - function CsvWriterFactory(csvStringifierFactory) { - this.csvStringifierFactory = csvStringifierFactory; - } - CsvWriterFactory.prototype.createArrayCsvWriter = function (params) { - var csvStringifier = this.csvStringifierFactory.createArrayCsvStringifier({ - header: params.header, - fieldDelimiter: params.fieldDelimiter, - recordDelimiter: params.recordDelimiter, - alwaysQuote: params.alwaysQuote - }); - return new csv_writer_1.CsvWriter(csvStringifier, params.path, params.encoding, params.append); - }; - CsvWriterFactory.prototype.createObjectCsvWriter = function (params) { - var csvStringifier = this.csvStringifierFactory.createObjectCsvStringifier({ - header: params.header, - fieldDelimiter: params.fieldDelimiter, - recordDelimiter: params.recordDelimiter, - headerIdDelimiter: params.headerIdDelimiter, - alwaysQuote: params.alwaysQuote - }); - return new csv_writer_1.CsvWriter(csvStringifier, params.path, params.encoding, params.append); - }; - return CsvWriterFactory; -}()); -exports.CsvWriterFactory = CsvWriterFactory; -//# sourceMappingURL=csv-writer-factory.js.map \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/lib/csv-writer-factory.js.map b/bin/node_modules/csv-writer/dist/lib/csv-writer-factory.js.map deleted file mode 100644 index 0851e62..0000000 --- a/bin/node_modules/csv-writer/dist/lib/csv-writer-factory.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"csv-writer-factory.js","sourceRoot":"","sources":["../../src/lib/csv-writer-factory.ts"],"names":[],"mappings":";;AAAA,2CAAuC;AAyBvC;IACI,0BAA6B,qBAA4C;QAA5C,0BAAqB,GAArB,qBAAqB,CAAuB;IAAG,CAAC;IAE7E,+CAAoB,GAApB,UAAqB,MAA4B;QAC7C,IAAM,cAAc,GAAG,IAAI,CAAC,qBAAqB,CAAC,yBAAyB,CAAC;YACxE,MAAM,EAAE,MAAM,CAAC,MAAM;YACrB,cAAc,EAAE,MAAM,CAAC,cAAc;YACrC,eAAe,EAAE,MAAM,CAAC,eAAe;YACvC,WAAW,EAAE,MAAM,CAAC,WAAW;SAClC,CAAC,CAAC;QACH,OAAO,IAAI,sBAAS,CAAC,cAAc,EAAE,MAAM,CAAC,IAAI,EAAE,MAAM,CAAC,QAAQ,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC;IACtF,CAAC;IAED,gDAAqB,GAArB,UAAsB,MAA6B;QAC/C,IAAM,cAAc,GAAG,IAAI,CAAC,qBAAqB,CAAC,0BAA0B,CAAC;YACzE,MAAM,EAAE,MAAM,CAAC,MAAM;YACrB,cAAc,EAAE,MAAM,CAAC,cAAc;YACrC,eAAe,EAAE,MAAM,CAAC,eAAe;YACvC,iBAAiB,EAAE,MAAM,CAAC,iBAAiB;YAC3C,WAAW,EAAE,MAAM,CAAC,WAAW;SAClC,CAAC,CAAC;QACH,OAAO,IAAI,sBAAS,CAAC,cAAc,EAAE,MAAM,CAAC,IAAI,EAAE,MAAM,CAAC,QAAQ,EAAE,MAAM,CAAC,MAAM,CAAC,CAAC;IACtF,CAAC;IACL,uBAAC;AAAD,CAAC,AAvBD,IAuBC;AAvBY,4CAAgB"} \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/lib/csv-writer.js b/bin/node_modules/csv-writer/dist/lib/csv-writer.js deleted file mode 100644 index 51d7425..0000000 --- a/bin/node_modules/csv-writer/dist/lib/csv-writer.js +++ /dev/null @@ -1,76 +0,0 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __generator = (this && this.__generator) || function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; - return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (_) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? 
op[1] : void 0, done: true }; - } -}; -Object.defineProperty(exports, "__esModule", { value: true }); -var file_writer_1 = require("./file-writer"); -var DEFAULT_INITIAL_APPEND_FLAG = false; -var CsvWriter = /** @class */ (function () { - function CsvWriter(csvStringifier, path, encoding, append) { - if (append === void 0) { append = DEFAULT_INITIAL_APPEND_FLAG; } - this.csvStringifier = csvStringifier; - this.append = append; - this.fileWriter = new file_writer_1.FileWriter(path, this.append, encoding); - } - CsvWriter.prototype.writeRecords = function (records) { - return __awaiter(this, void 0, void 0, function () { - var recordsString, writeString; - return __generator(this, function (_a) { - switch (_a.label) { - case 0: - recordsString = this.csvStringifier.stringifyRecords(records); - writeString = this.headerString + recordsString; - return [4 /*yield*/, this.fileWriter.write(writeString)]; - case 1: - _a.sent(); - this.append = true; - return [2 /*return*/]; - } - }); - }); - }; - Object.defineProperty(CsvWriter.prototype, "headerString", { - get: function () { - var headerString = !this.append && this.csvStringifier.getHeaderString(); - return headerString || ''; - }, - enumerable: true, - configurable: true - }); - return CsvWriter; -}()); -exports.CsvWriter = CsvWriter; -//# sourceMappingURL=csv-writer.js.map \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/lib/csv-writer.js.map b/bin/node_modules/csv-writer/dist/lib/csv-writer.js.map deleted file mode 100644 index b134506..0000000 --- a/bin/node_modules/csv-writer/dist/lib/csv-writer.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"csv-writer.js","sourceRoot":"","sources":["../../src/lib/csv-writer.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AACA,6CAAyC;AAEzC,IAAM,2BAA2B,GAAG,KAAK,CAAC;AAE1C;IAGI,mBAA6B,cAAiC,EAClD,IAAY,EACZ,QAAiB,EACT,MAAoC;QAApC,uBAAA,EAAA,oCAAoC;QAH3B,mBAAc,GAAd,cAAc,CAAmB;QAG1C,WAAM,GAAN,MAAM,CAA8B;QACpD,IAAI,CAAC,UAAU,GAAG,IAAI,wBAAU,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;IAClE,CAAC;IAEK,gCAAY,GAAlB,UAAmB,OAAY;;;;;;wBACrB,aAAa,GAAG,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,OAAO,CAAC,CAAC;wBAC9D,WAAW,GAAG,IAAI,CAAC,YAAY,GAAG,aAAa,CAAC;wBACtD,qBAAM,IAAI,CAAC,UAAU,CAAC,KAAK,CAAC,WAAW,CAAC,EAAA;;wBAAxC,SAAwC,CAAC;wBACzC,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC;;;;;KACtB;IAED,sBAAY,mCAAY;aAAxB;YACI,IAAM,YAAY,GAAG,CAAC,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,cAAc,CAAC,eAAe,EAAE,CAAC;YAC3E,OAAO,YAAY,IAAI,EAAE,CAAC;QAC9B,CAAC;;;OAAA;IACL,gBAAC;AAAD,CAAC,AArBD,IAqBC;AArBY,8BAAS"} \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/lib/field-stringifier.js b/bin/node_modules/csv-writer/dist/lib/field-stringifier.js deleted file mode 100644 index 3b22942..0000000 --- a/bin/node_modules/csv-writer/dist/lib/field-stringifier.js +++ /dev/null @@ -1,69 +0,0 @@ -"use strict"; -var __extends = (this && this.__extends) || (function () { - var extendStatics = function (d, b) { - extendStatics = Object.setPrototypeOf || - ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || - function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; - return extendStatics(d, b); - }; - return function (d, b) { - extendStatics(d, b); - function __() { this.constructor = d; } - d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); - }; -})(); -Object.defineProperty(exports, "__esModule", { value: true }); -var DEFAULT_FIELD_DELIMITER = ','; -var VALID_FIELD_DELIMITERS = [DEFAULT_FIELD_DELIMITER, ';']; -var FieldStringifier = /** @class */ (function () { - function FieldStringifier(fieldDelimiter) { - this.fieldDelimiter = fieldDelimiter; - } - FieldStringifier.prototype.isEmpty = function (value) { - return typeof value === 'undefined' || value === null || value === ''; - }; - FieldStringifier.prototype.quoteField = function (field) { - return "\"" + field.replace(/"/g, '""') + "\""; - }; - return FieldStringifier; -}()); -exports.FieldStringifier = FieldStringifier; -var DefaultFieldStringifier = /** @class */ (function (_super) { - __extends(DefaultFieldStringifier, _super); - function DefaultFieldStringifier() { - return _super !== null && _super.apply(this, arguments) || this; - } - DefaultFieldStringifier.prototype.stringify = function (value) { - if (this.isEmpty(value)) - return ''; - var str = String(value); - return this.needsQuote(str) ? this.quoteField(str) : str; - }; - DefaultFieldStringifier.prototype.needsQuote = function (str) { - return str.includes(this.fieldDelimiter) || str.includes('\n') || str.includes('"'); - }; - return DefaultFieldStringifier; -}(FieldStringifier)); -var ForceQuoteFieldStringifier = /** @class */ (function (_super) { - __extends(ForceQuoteFieldStringifier, _super); - function ForceQuoteFieldStringifier() { - return _super !== null && _super.apply(this, arguments) || this; - } - ForceQuoteFieldStringifier.prototype.stringify = function (value) { - return this.isEmpty(value) ? '' : this.quoteField(String(value)); - }; - return ForceQuoteFieldStringifier; -}(FieldStringifier)); -function createFieldStringifier(fieldDelimiter, alwaysQuote) { - if (fieldDelimiter === void 0) { fieldDelimiter = DEFAULT_FIELD_DELIMITER; } - if (alwaysQuote === void 0) { alwaysQuote = false; } - _validateFieldDelimiter(fieldDelimiter); - return alwaysQuote ? 
new ForceQuoteFieldStringifier(fieldDelimiter) : new DefaultFieldStringifier(fieldDelimiter); -} -exports.createFieldStringifier = createFieldStringifier; -function _validateFieldDelimiter(delimiter) { - if (VALID_FIELD_DELIMITERS.indexOf(delimiter) === -1) { - throw new Error("Invalid field delimiter `" + delimiter + "` is specified"); - } -} -//# sourceMappingURL=field-stringifier.js.map \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/lib/field-stringifier.js.map b/bin/node_modules/csv-writer/dist/lib/field-stringifier.js.map deleted file mode 100644 index 5a1e391..0000000 --- a/bin/node_modules/csv-writer/dist/lib/field-stringifier.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"field-stringifier.js","sourceRoot":"","sources":["../../src/lib/field-stringifier.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;AAEA,IAAM,uBAAuB,GAAG,GAAG,CAAC;AACpC,IAAM,sBAAsB,GAAG,CAAC,uBAAuB,EAAE,GAAG,CAAC,CAAC;AAE9D;IACI,0BAA4B,cAAsB;QAAtB,mBAAc,GAAd,cAAc,CAAQ;IAAG,CAAC;IAI5C,kCAAO,GAAjB,UAAkB,KAAa;QAC3B,OAAO,OAAO,KAAK,KAAK,WAAW,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,EAAE,CAAC;IAC1E,CAAC;IAES,qCAAU,GAApB,UAAqB,KAAa;QAC9B,OAAO,OAAI,KAAK,CAAC,OAAO,CAAC,IAAI,EAAE,IAAI,CAAC,OAAG,CAAC;IAC5C,CAAC;IACL,uBAAC;AAAD,CAAC,AAZD,IAYC;AAZqB,4CAAgB;AActC;IAAsC,2CAAgB;IAAtD;;IAUA,CAAC;IATG,2CAAS,GAAT,UAAU,KAAa;QACnB,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC;YAAE,OAAO,EAAE,CAAC;QACnC,IAAM,GAAG,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC;QAC1B,OAAO,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC;IAC7D,CAAC;IAEO,4CAAU,GAAlB,UAAmB,GAAW;QAC1B,OAAO,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,cAAc,CAAC,IAAI,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;IACxF,CAAC;IACL,8BAAC;AAAD,CAAC,AAVD,CAAsC,gBAAgB,GAUrD;AAED;IAAyC,8CAAgB;IAAzD;;IAIA,CAAC;IAHG,8CAAS,GAAT,UAAU,KAAa;QACnB,OAAO,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;IACrE,CAAC;IACL,iCAAC;AAAD,CAAC,AAJD,CAAyC,gBAAgB,GAIxD;AAED,SAAgB,sBAAsB,CAAC,cAAgD,EAAE,WAAmB;IAArE,+BAAA,EAAA,wCAAgD;IAAE,4BAAA,EAAA,mBAAmB;IACxG,uBAAuB,CAAC,cAAc,CAAC,CAAC;IACxC,OAAO,WAAW,CAAC,CAAC,CAAC,IAAI,0BAA0B,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,IAAI,uBAAuB,CAAC,cAAc,CAAC,CAAC;AACtH,CAAC;AAHD,wDAGC;AAED,SAAS,uBAAuB,CAAC,SAAiB;IAC9C,IAAI,sBAAsB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE;QAClD,MAAM,IAAI,KAAK,CAAC,8BAA6B,SAAS,mBAAiB,CAAC,CAAC;KAC5E;AACL,CAAC"} \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/lib/file-writer.js b/bin/node_modules/csv-writer/dist/lib/file-writer.js deleted file mode 100644 index 855955c..0000000 --- a/bin/node_modules/csv-writer/dist/lib/file-writer.js +++ /dev/null @@ -1,72 +0,0 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __generator = (this && this.__generator) || function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; - return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (_) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; - } -}; -Object.defineProperty(exports, "__esModule", { value: true }); -var promise_1 = require("./lang/promise"); -var fs_1 = require("fs"); -var writeFilePromise = promise_1.promisify(fs_1.writeFile); -var DEFAULT_ENCODING = 'utf8'; -var FileWriter = /** @class */ (function () { - function FileWriter(path, append, encoding) { - if (encoding === void 0) { encoding = DEFAULT_ENCODING; } - this.path = path; - this.append = append; - this.encoding = encoding; - } - FileWriter.prototype.write = function (string) { - return __awaiter(this, void 0, void 0, function () { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: return [4 /*yield*/, writeFilePromise(this.path, string, this.getWriteOption())]; - case 1: - _a.sent(); - this.append = true; - return [2 /*return*/]; - } - }); - }); - }; - FileWriter.prototype.getWriteOption = function () { - return { - encoding: this.encoding, - flag: this.append ? 
'a' : 'w' - }; - }; - return FileWriter; -}()); -exports.FileWriter = FileWriter; -//# sourceMappingURL=file-writer.js.map \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/lib/file-writer.js.map b/bin/node_modules/csv-writer/dist/lib/file-writer.js.map deleted file mode 100644 index c0ecbfd..0000000 --- a/bin/node_modules/csv-writer/dist/lib/file-writer.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"file-writer.js","sourceRoot":"","sources":["../../src/lib/file-writer.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,0CAAyC;AACzC,yBAA6B;AAE7B,IAAM,gBAAgB,GAAG,mBAAS,CAAC,cAAS,CAAC,CAAC;AAE9C,IAAM,gBAAgB,GAAG,MAAM,CAAC;AAEhC;IAEI,oBAA6B,IAAY,EACrB,MAAe,EACN,QAA2B;QAA3B,yBAAA,EAAA,2BAA2B;QAF3B,SAAI,GAAJ,IAAI,CAAQ;QACrB,WAAM,GAAN,MAAM,CAAS;QACN,aAAQ,GAAR,QAAQ,CAAmB;IACxD,CAAC;IAEK,0BAAK,GAAX,UAAY,MAAc;;;;4BACtB,qBAAM,gBAAgB,CAAC,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,cAAc,EAAE,CAAC,EAAA;;wBAAhE,SAAgE,CAAC;wBACjE,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC;;;;;KACtB;IAEO,mCAAc,GAAtB;QACI,OAAO;YACH,QAAQ,EAAE,IAAI,CAAC,QAAQ;YACvB,IAAI,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG;SAChC,CAAC;IACN,CAAC;IACL,iBAAC;AAAD,CAAC,AAlBD,IAkBC;AAlBY,gCAAU"} \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/lib/lang/object.js b/bin/node_modules/csv-writer/dist/lib/lang/object.js deleted file mode 100644 index 5d12434..0000000 --- a/bin/node_modules/csv-writer/dist/lib/lang/object.js +++ /dev/null @@ -1,6 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.isObject = function (value) { - return Object.prototype.toString.call(value) === '[object Object]'; -}; -//# sourceMappingURL=object.js.map \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/lib/lang/object.js.map b/bin/node_modules/csv-writer/dist/lib/lang/object.js.map deleted file mode 100644 index ac17a38..0000000 --- a/bin/node_modules/csv-writer/dist/lib/lang/object.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"object.js","sourceRoot":"","sources":["../../../src/lib/lang/object.ts"],"names":[],"mappings":";;AAAa,QAAA,QAAQ,GAAG,UAAC,KAAU;IAC/B,OAAA,MAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,iBAAiB;AAA3D,CAA2D,CAAC"} \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/lib/lang/promise.js b/bin/node_modules/csv-writer/dist/lib/lang/promise.js deleted file mode 100644 index 1ee4696..0000000 --- a/bin/node_modules/csv-writer/dist/lib/lang/promise.js +++ /dev/null @@ -1,28 +0,0 @@ -"use strict"; -var __spreadArrays = (this && this.__spreadArrays) || function () { - for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; - for (var r = Array(s), k = 0, i = 0; i < il; i++) - for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) - r[k] = a[j]; - return r; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -function promisify(fn) { - return function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - return new Promise(function (resolve, reject) { - var nodeCallback = function (err, result) { - if (err) - reject(err); - else - resolve(result); - }; - fn.apply(null, __spreadArrays(args, [nodeCallback])); - }); - }; -} -exports.promisify = promisify; -//# sourceMappingURL=promise.js.map \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/lib/lang/promise.js.map b/bin/node_modules/csv-writer/dist/lib/lang/promise.js.map deleted file mode 
100644 index c8d2da6..0000000 --- a/bin/node_modules/csv-writer/dist/lib/lang/promise.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"promise.js","sourceRoot":"","sources":["../../../src/lib/lang/promise.ts"],"names":[],"mappings":";;;;;;;;;AAGA,SAAgB,SAAS,CAAC,EAA4B;IAClD,OAAO;QAAC,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QAClB,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;YAC/B,IAAM,YAAY,GAAG,UAAC,GAAkB,EAAE,MAAW;gBACjD,IAAI,GAAG;oBAAE,MAAM,CAAC,GAAG,CAAC,CAAC;;oBAChB,OAAO,CAAC,MAAM,CAAC,CAAC;YACzB,CAAC,CAAC;YACF,EAAE,CAAC,KAAK,CAAC,IAAI,iBAAM,IAAI,GAAE,YAAY,GAAE,CAAC;QAC5C,CAAC,CAAC,CAAC;IACP,CAAC,CAAC;AACN,CAAC;AAVD,8BAUC"} \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/lib/record.js b/bin/node_modules/csv-writer/dist/lib/record.js deleted file mode 100644 index 1f82ca9..0000000 --- a/bin/node_modules/csv-writer/dist/lib/record.js +++ /dev/null @@ -1,3 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=record.js.map \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/lib/record.js.map b/bin/node_modules/csv-writer/dist/lib/record.js.map deleted file mode 100644 index d9c2928..0000000 --- a/bin/node_modules/csv-writer/dist/lib/record.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"record.js","sourceRoot":"","sources":["../../src/lib/record.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/test/csv-stringifiers/array.test.js b/bin/node_modules/csv-writer/dist/test/csv-stringifiers/array.test.js deleted file mode 100644 index 5fc4090..0000000 --- a/bin/node_modules/csv-writer/dist/test/csv-stringifiers/array.test.js +++ /dev/null @@ -1,114 +0,0 @@ -"use strict"; -var __generator = (this && this.__generator) || function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; - return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (_) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? 
op[1] : void 0, done: true }; - } -}; -Object.defineProperty(exports, "__esModule", { value: true }); -var delimiter_1 = require("../helper/delimiter"); -var index_1 = require("../../index"); -var assert_1 = require("assert"); -describe('ArrayCsvStringifier', function () { - var records = [ - ['FIELD_A1', 'FIELD_B1'], - ['FIELD_A2', 'FIELD_B2'] - ]; - describe('When field delimiter is comma', generateTestCases()); - describe('When field delimiter is semicolon', generateTestCases(';')); - describe('When field delimiter is neither comma nor semicolon', function () { - it('throws an exception', function () { - assert_1.throws(function () { - index_1.createArrayCsvStringifier({ fieldDelimiter: '/' }); - }); - }); - }); - describe('When record delimiter is neither LF nor CR+LF', function () { - it('throws an exception', function () { - assert_1.throws(function () { - index_1.createArrayCsvStringifier({ recordDelimiter: '\r' }); - }); - }); - }); - describe('When records input is an iterable other than an array', function () { - var stringifier = index_1.createArrayCsvStringifier({ - header: ['TITLE_A', 'TITLE_B'] - }); - function recordGenerator() { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: return [4 /*yield*/, records[0]]; - case 1: - _a.sent(); - return [4 /*yield*/, records[1]]; - case 2: - _a.sent(); - return [2 /*return*/]; - } - }); - } - it('converts the records into CSV', function () { - assert_1.strictEqual(stringifier.stringifyRecords(recordGenerator()), 'FIELD_A1,FIELD_B1\nFIELD_A2,FIELD_B2\n'); - }); - }); - describe('When `alwaysQuote` flag is set', function () { - var stringifier = index_1.createArrayCsvStringifier({ - header: ['TITLE_A', 'TITLE_B'], - alwaysQuote: true - }); - it('quotes all header fields', function () { - assert_1.strictEqual(stringifier.getHeaderString(), '"TITLE_A","TITLE_B"\n'); - }); - it('quotes all data fields', function () { - assert_1.strictEqual(stringifier.stringifyRecords(records), '"FIELD_A1","FIELD_B1"\n"FIELD_A2","FIELD_B2"\n'); - }); - }); - function generateTestCases(fieldDelimiter) { - var delim = delimiter_1.resolveDelimiterChar(fieldDelimiter); - return function () { - describe('header is specified as a list of column titles', function () { - var stringifier = index_1.createArrayCsvStringifier({ - header: ['TITLE_A', 'TITLE_B'], - fieldDelimiter: fieldDelimiter - }); - it("returns a header line with field separated by \"" + delim + "\"", function () { - assert_1.strictEqual(stringifier.getHeaderString(), "TITLE_A" + delim + "TITLE_B\n"); - }); - it("converts given data records into CSV lines with field separated by \"" + delim + "\"", function () { - assert_1.strictEqual(stringifier.stringifyRecords(records), "FIELD_A1" + delim + "FIELD_B1\nFIELD_A2" + delim + "FIELD_B2\n"); - }); - }); - describe('header is not specified', function () { - var stringifier = index_1.createArrayCsvStringifier({ fieldDelimiter: fieldDelimiter }); - it('returns null for header line', function () { - assert_1.strictEqual(stringifier.getHeaderString(), null); - }); - it("converts given data records into CSV lines with field separated by \"" + delim + "\"", function () { - assert_1.strictEqual(stringifier.stringifyRecords(records), "FIELD_A1" + delim + "FIELD_B1\nFIELD_A2" + delim + "FIELD_B2\n"); - }); - }); - }; - } -}); -//# sourceMappingURL=array.test.js.map \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/test/csv-stringifiers/array.test.js.map 
b/bin/node_modules/csv-writer/dist/test/csv-stringifiers/array.test.js.map deleted file mode 100644 index 888b01c..0000000 --- a/bin/node_modules/csv-writer/dist/test/csv-stringifiers/array.test.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"array.test.js","sourceRoot":"","sources":["../../../src/test/csv-stringifiers/array.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,iDAAyD;AACzD,qCAAsD;AACtD,iCAA2C;AAE3C,QAAQ,CAAC,qBAAqB,EAAE;IAC5B,IAAM,OAAO,GAAG;QACZ,CAAC,UAAU,EAAE,UAAU,CAAC;QACxB,CAAC,UAAU,EAAE,UAAU,CAAC;KAC3B,CAAC;IAEF,QAAQ,CAAC,+BAA+B,EAAE,iBAAiB,EAAE,CAAC,CAAC;IAE/D,QAAQ,CAAC,mCAAmC,EAAE,iBAAiB,CAAC,GAAG,CAAC,CAAC,CAAC;IAEtE,QAAQ,CAAC,qDAAqD,EAAE;QAC5D,EAAE,CAAC,qBAAqB,EAAE;YACtB,eAAM,CAAC;gBACH,iCAAyB,CAAC,EAAC,cAAc,EAAE,GAAG,EAAC,CAAC,CAAC;YACrD,CAAC,CAAC,CAAC;QACP,CAAC,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,+CAA+C,EAAE;QACtD,EAAE,CAAC,qBAAqB,EAAE;YACtB,eAAM,CAAC;gBACH,iCAAyB,CAAC,EAAC,eAAe,EAAE,IAAI,EAAC,CAAC,CAAC;YACvD,CAAC,CAAC,CAAC;QACP,CAAC,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,uDAAuD,EAAE;QAC9D,IAAM,WAAW,GAAG,iCAAyB,CAAC;YAC1C,MAAM,EAAE,CAAC,SAAS,EAAE,SAAS,CAAC;SACjC,CAAC,CAAC;QACH,SAAW,eAAe;;;4BACtB,qBAAM,OAAO,CAAC,CAAC,CAAC,EAAA;;wBAAhB,SAAgB,CAAC;wBACjB,qBAAM,OAAO,CAAC,CAAC,CAAC,EAAA;;wBAAhB,SAAgB,CAAC;;;;SACpB;QAED,EAAE,CAAC,+BAA+B,EAAE;YAChC,oBAAW,CACP,WAAW,CAAC,gBAAgB,CAAC,eAAe,EAAE,CAAC,EAC/C,wCAAwC,CAC3C,CAAC;QACN,CAAC,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,gCAAgC,EAAE;QACvC,IAAM,WAAW,GAAG,iCAAyB,CAAC;YAC1C,MAAM,EAAE,CAAC,SAAS,EAAE,SAAS,CAAC;YAC9B,WAAW,EAAE,IAAI;SACpB,CAAC,CAAC;QAEH,EAAE,CAAC,0BAA0B,EAAE;YAC3B,oBAAW,CAAC,WAAW,CAAC,eAAe,EAAE,EAAE,uBAAuB,CAAC,CAAC;QACxE,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,wBAAwB,EAAE;YACzB,oBAAW,CAAC,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EAAE,gDAAgD,CAAC,CAAC;QACzG,CAAC,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,SAAS,iBAAiB,CAAC,cAAuB;QAC9C,IAAM,KAAK,GAAG,gCAAoB,CAAC,cAAc,CAAC,CAAC;QACnD,OAAO;YACH,QAAQ,CAAC,gDAAgD,EAAE;gBACvD,IAAM,WAAW,GAAG,iCAAyB,CAAC;oBAC1C,MAAM,EAAE,CAAC,SAAS,EAAE,SAAS,CAAC;oBAC9B,cAAc,gBAAA;iBACjB,CAAC,CAAC;gBAEH,EAAE,CAAC,qDAAkD,KAAK,OAAG,EAAE;oBAC3D,oBAAW,CAAC,WAAW,CAAC,eAAe,EAAE,EAAE,YAAU,KAAK,cAAW,CAAC,CAAC;gBAC3E,CAAC,CAAC,CAAC;gBAEH,EAAE,CAAC,0EAAuE,KAAK,OAAG,EAAE;oBAChF,oBAAW,CACP,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EACrC,aAAW,KAAK,0BAAqB,KAAK,eAAY,CACzD,CAAC;gBACN,CAAC,CAAC,CAAC;YACP,CAAC,CAAC,CAAC;YAEH,QAAQ,CAAC,yBAAyB,EAAE;gBAChC,IAAM,WAAW,GAAG,iCAAyB,CAAC,EAAC,cAAc,gBAAA,EAAC,CAAC,CAAC;gBAEhE,EAAE,CAAC,8BAA8B,EAAE;oBAC/B,oBAAW,CAAC,WAAW,CAAC,eAAe,EAAE,EAAE,IAAI,CAAC,CAAC;gBACrD,CAAC,CAAC,CAAC;gBAEH,EAAE,CAAC,0EAAuE,KAAK,OAAG,EAAE;oBAChF,oBAAW,CACP,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EACrC,aAAW,KAAK,0BAAqB,KAAK,eAAY,CACzD,CAAC;gBACN,CAAC,CAAC,CAAC;YACP,CAAC,CAAC,CAAC;QACP,CAAC,CAAC;IACN,CAAC;AACL,CAAC,CAAC,CAAC"} \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/test/csv-stringifiers/object.test.js b/bin/node_modules/csv-writer/dist/test/csv-stringifiers/object.test.js deleted file mode 100644 index 3b3ace1..0000000 --- a/bin/node_modules/csv-writer/dist/test/csv-stringifiers/object.test.js +++ /dev/null @@ -1,156 +0,0 @@ -"use strict"; -var __generator = (this && this.__generator) || function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; - return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function 
step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (_) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; - } -}; -Object.defineProperty(exports, "__esModule", { value: true }); -var delimiter_1 = require("../helper/delimiter"); -var index_1 = require("../../index"); -var assert_1 = require("assert"); -describe('ObjectCsvStringifier', function () { - var records = [ - { FIELD_A: 'VALUE_A1', FIELD_B: 'VALUE_B1' }, - { FIELD_A: 'VALUE_A2', FIELD_B: 'VALUE_B2', OTHERS: { FIELD_C: 'VALUE_C2' } } - ]; - describe('When field delimiter is comma', generateTestCases()); - describe('When field delimiter is semicolon', generateTestCases(';')); - describe('When field delimiter is neither comma nor semicolon', function () { - it('throws an exception', function () { - assert_1.throws(function () { - index_1.createObjectCsvStringifier({ - header: ['FIELD_A', 'FIELD_B'], - fieldDelimiter: '/' - }); - }); - }); - }); - describe('When record delimiter is neither LF nor CR+LF', function () { - it('throws an exception', function () { - assert_1.throws(function () { - index_1.createObjectCsvStringifier({ - header: ['FIELD_A', 'FIELD_B'], - recordDelimiter: '\r' - }); - }); - }); - }); - describe('When records input is an iterable other than an array', function () { - var stringifier = index_1.createObjectCsvStringifier({ - header: ['FIELD_A', 'FIELD_B'] - }); - function recordGenerator() { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: return [4 /*yield*/, records[0]]; - case 1: - _a.sent(); - return [4 /*yield*/, records[1]]; - case 2: - _a.sent(); - return [2 /*return*/]; - } - }); - } - it('converts the records into CSV', function () { - assert_1.strictEqual(stringifier.stringifyRecords(recordGenerator()), 'VALUE_A1,VALUE_B1\nVALUE_A2,VALUE_B2\n'); - }); - }); - describe('When `alwaysQuote` flag is set', function () { - var stringifier = index_1.createObjectCsvStringifier({ - header: [ - { id: 'FIELD_A', title: 'TITLE_A' }, - { id: 'FIELD_B', title: 'TITLE_B' } - ], - alwaysQuote: true - }); - it('quotes all header fields', function () { - assert_1.strictEqual(stringifier.getHeaderString(), '"TITLE_A","TITLE_B"\n'); - }); - it('quotes all data fields', function () { - assert_1.strictEqual(stringifier.stringifyRecords(records), '"VALUE_A1","VALUE_B1"\n"VALUE_A2","VALUE_B2"\n'); - }); - }); - describe('When `headerIdDelimiter` is set', function () { - var stringifier = index_1.createObjectCsvStringifier({ - header: [ - { id: 'FIELD_A', title: 'TITLE_A' }, - { id: 'OTHERS/FIELD_C', title: 'TITLE_C' } - ], - 
headerIdDelimiter: '/' - }); - it('uses the title as is', function () { - assert_1.strictEqual(stringifier.getHeaderString(), 'TITLE_A,TITLE_C\n'); - }); - it('picks up a value in nested objects', function () { - assert_1.strictEqual(stringifier.stringifyRecords(records), 'VALUE_A1,\nVALUE_A2,VALUE_C2\n'); - }); - }); - function generateTestCases(fieldDelimiter) { - var delim = delimiter_1.resolveDelimiterChar(fieldDelimiter); - return function () { - describe('header is specified with title', function () { - var stringifier = index_1.createObjectCsvStringifier({ - header: [ - { id: 'FIELD_A', title: 'TITLE_A' }, - { id: 'FIELD_B', title: 'TITLE_B' } - ], - fieldDelimiter: fieldDelimiter - }); - it("returns a header line with field separated by \"" + delim + "\"", function () { - assert_1.strictEqual(stringifier.getHeaderString(), "TITLE_A" + delim + "TITLE_B\n"); - }); - it("converts given data records into CSV lines with field separated by \"" + delim + "\"", function () { - assert_1.strictEqual(stringifier.stringifyRecords(records), "VALUE_A1" + delim + "VALUE_B1\nVALUE_A2" + delim + "VALUE_B2\n"); - }); - }); - describe('header is specified without title', function () { - var stringifier = index_1.createObjectCsvStringifier({ - header: ['FIELD_A', 'FIELD_B'], - fieldDelimiter: fieldDelimiter - }); - it('returns null for header line', function () { - assert_1.strictEqual(stringifier.getHeaderString(), null); - }); - it("converts given data records into CSV lines with field separated by \"" + delim + "\"", function () { - assert_1.strictEqual(stringifier.stringifyRecords(records), "VALUE_A1" + delim + "VALUE_B1\nVALUE_A2" + delim + "VALUE_B2\n"); - }); - }); - describe('header columns are given with reverse order', function () { - var stringifier = index_1.createObjectCsvStringifier({ - header: [ - { id: 'FIELD_B', title: 'TITLE_B' }, - { id: 'FIELD_A', title: 'TITLE_A' } - ], - fieldDelimiter: fieldDelimiter - }); - it("layouts fields with the order of headers given with field separated by \"" + delim + "\"", function () { - assert_1.strictEqual(stringifier.stringifyRecords(records), "VALUE_B1" + delim + "VALUE_A1\nVALUE_B2" + delim + "VALUE_A2\n"); - }); - }); - }; - } -}); -//# sourceMappingURL=object.test.js.map \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/test/csv-stringifiers/object.test.js.map b/bin/node_modules/csv-writer/dist/test/csv-stringifiers/object.test.js.map deleted file mode 100644 index bdcdd9d..0000000 --- a/bin/node_modules/csv-writer/dist/test/csv-stringifiers/object.test.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"object.test.js","sourceRoot":"","sources":["../../../src/test/csv-stringifiers/object.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,iDAAyD;AACzD,qCAAuD;AACvD,iCAA2C;AAE3C,QAAQ,CAAC,sBAAsB,EAAE;IAC7B,IAAM,OAAO,GAAG;QACZ,EAAC,OAAO,EAAE,UAAU,EAAE,OAAO,EAAE,UAAU,EAAC;QAC1C,EAAC,OAAO,EAAE,UAAU,EAAE,OAAO,EAAE,UAAU,EAAE,MAAM,EAAE,EAAC,OAAO,EAAE,UAAU,EAAC,EAAC;KAC5E,CAAC;IAEF,QAAQ,CAAC,+BAA+B,EAAE,iBAAiB,EAAE,CAAC,CAAC;IAE/D,QAAQ,CAAC,mCAAmC,EAAE,iBAAiB,CAAC,GAAG,CAAC,CAAC,CAAC;IAEtE,QAAQ,CAAC,qDAAqD,EAAE;QAC5D,EAAE,CAAC,qBAAqB,EAAE;YACtB,eAAM,CAAC;gBACH,kCAA0B,CAAC;oBACvB,MAAM,EAAE,CAAC,SAAS,EAAE,SAAS,CAAC;oBAC9B,cAAc,EAAE,GAAG;iBACtB,CAAC,CAAC;YACP,CAAC,CAAC,CAAC;QACP,CAAC,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,+CAA+C,EAAE;QACtD,EAAE,CAAC,qBAAqB,EAAE;YACtB,eAAM,CAAC;gBACH,kCAA0B,CAAC;oBACvB,MAAM,EAAE,CAAC,SAAS,EAAE,SAAS,CAAC;oBAC9B,eAAe,EAAE,IAAI;iBACxB,CAAC,CAAC;YACP,CAAC,CAAC,CAAC;QACP,CAAC,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,uDAAuD,EAAE;QAC9D,IAAM,WAAW,GAAG,kCAA0B,CAAC;YAC3C,MAAM,EAAE,CAAC,SAAS,EAAE,SAAS,CAAC;SACjC,CAAC,CAAC;QACH,SAAW,eAAe;;;4BACtB,qBAAM,OAAO,CAAC,CAAC,CAAC,EAAA;;wBAAhB,SAAgB,CAAC;wBACjB,qBAAM,OAAO,CAAC,CAAC,CAAC,EAAA;;wBAAhB,SAAgB,CAAC;;;;SACpB;QAED,EAAE,CAAC,+BAA+B,EAAE;YAChC,oBAAW,CACP,WAAW,CAAC,gBAAgB,CAAC,eAAe,EAAE,CAAC,EAC/C,wCAAwC,CAC3C,CAAC;QACN,CAAC,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,gCAAgC,EAAE;QACvC,IAAM,WAAW,GAAG,kCAA0B,CAAC;YAC3C,MAAM,EAAE;gBACJ,EAAC,EAAE,EAAE,SAAS,EAAE,KAAK,EAAE,SAAS,EAAC;gBACjC,EAAC,EAAE,EAAE,SAAS,EAAE,KAAK,EAAE,SAAS,EAAC;aACpC;YACD,WAAW,EAAE,IAAI;SACpB,CAAC,CAAC;QAEH,EAAE,CAAC,0BAA0B,EAAE;YAC3B,oBAAW,CAAC,WAAW,CAAC,eAAe,EAAE,EAAE,uBAAuB,CAAC,CAAC;QACxE,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,wBAAwB,EAAE;YACzB,oBAAW,CAAC,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EAAE,gDAAgD,CAAC,CAAC;QACzG,CAAC,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,iCAAiC,EAAE;QACxC,IAAM,WAAW,GAAG,kCAA0B,CAAC;YAC3C,MAAM,EAAE;gBACJ,EAAC,EAAE,EAAE,SAAS,EAAE,KAAK,EAAE,SAAS,EAAC;gBACjC,EAAC,EAAE,EAAE,gBAAgB,EAAE,KAAK,EAAE,SAAS,EAAC;aAC3C;YACD,iBAAiB,EAAE,GAAG;SACzB,CAAC,CAAC;QAEH,EAAE,CAAC,sBAAsB,EAAE;YACvB,oBAAW,CAAC,WAAW,CAAC,eAAe,EAAE,EAAE,mBAAmB,CAAC,CAAC;QACpE,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,oCAAoC,EAAE;YACrC,oBAAW,CAAC,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EAAE,gCAAgC,CAAC,CAAC;QACzF,CAAC,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,SAAS,iBAAiB,CAAC,cAAuB;QAC9C,IAAM,KAAK,GAAG,gCAAoB,CAAC,cAAc,CAAC,CAAC;QACnD,OAAO;YACH,QAAQ,CAAC,gCAAgC,EAAE;gBACvC,IAAM,WAAW,GAAG,kCAA0B,CAAC;oBAC3C,MAAM,EAAE;wBACJ,EAAC,EAAE,EAAE,SAAS,EAAE,KAAK,EAAE,SAAS,EAAC;wBACjC,EAAC,EAAE,EAAE,SAAS,EAAE,KAAK,EAAE,SAAS,EAAC;qBACpC;oBACD,cAAc,gBAAA;iBACjB,CAAC,CAAC;gBAEH,EAAE,CAAC,qDAAkD,KAAK,OAAG,EAAE;oBAC3D,oBAAW,CAAC,WAAW,CAAC,eAAe,EAAE,EAAE,YAAU,KAAK,cAAW,CAAC,CAAC;gBAC3E,CAAC,CAAC,CAAC;gBAEH,EAAE,CAAC,0EAAuE,KAAK,OAAG,EAAE;oBAChF,oBAAW,CACP,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EACrC,aAAW,KAAK,0BAAqB,KAAK,eAAY,CACzD,CAAC;gBACN,CAAC,CAAC,CAAC;YACP,CAAC,CAAC,CAAC;YAEH,QAAQ,CAAC,mCAAmC,EAAE;gBAC1C,IAAM,WAAW,GAAG,kCAA0B,CAAC;oBAC3C,MAAM,EAAE,CAAC,SAAS,EAAE,SAAS,CAAC;oBAC9B,cAAc,gBAAA;iBACjB,CAAC,CAAC;gBAEH,EAAE,CAAC,8BAA8B,EAAE;oBAC/B,oBAAW,CAAC,WAAW,CAAC,eAAe,EAAE,EAAE,IAAI,CAAC,CAAC;gBACrD,CAAC,CAAC,CAAC;gBAEH,EAAE,CAAC,0EAAuE,KAAK,OAAG,EAAE;oBAChF,oBAAW,CACP,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EACrC,aAAW,KAAK,0BAAqB,KAAK,eAAY,CACzD,CAAC;gBACN,CAAC,CAAC,CAAC;YACP,CAAC,CAAC,CAAC;YAEH,QAAQ,CAAC,6CAA6C,EAAE;gBACpD,IAAM,WAAW,GAAG,kCAA0B,CAAC;oBAC3C,MAAM,EAAE;wBACJ,EAAC,EAAE,EAAE,SAAS,EAAE,KAAK,EAAE,SAAS,EAAC;wBACjC,EAAC,EAAE,EAAE,SAAS,EAAE,KAAK,EAAE,SAAS,EAAC;qBACpC;oBACD,cAAc,gBAAA;iB
ACjB,CAAC,CAAC;gBAEH,EAAE,CAAC,8EAA2E,KAAK,OAAG,EAAE;oBACpF,oBAAW,CACP,WAAW,CAAC,gBAAgB,CAAC,OAAO,CAAC,EACrC,aAAW,KAAK,0BAAqB,KAAK,eAAY,CACzD,CAAC;gBACN,CAAC,CAAC,CAAC;YACP,CAAC,CAAC,CAAC;QACP,CAAC,CAAC;IACN,CAAC;AACL,CAAC,CAAC,CAAC"} \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/test/field-stringifier.test.js b/bin/node_modules/csv-writer/dist/test/field-stringifier.test.js deleted file mode 100644 index 72eb70f..0000000 --- a/bin/node_modules/csv-writer/dist/test/field-stringifier.test.js +++ /dev/null @@ -1,79 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -var delimiter_1 = require("./helper/delimiter"); -var field_stringifier_1 = require("../lib/field-stringifier"); -var assert_1 = require("assert"); -describe('DefaultFieldStringifier', function () { - describe('When field delimiter is comma', generateTestCases(',')); - describe('When field delimiter is semicolon', generateTestCases(';')); - describe('When all fields needs to be quoted', function () { - var stringifier = field_stringifier_1.createFieldStringifier(',', true); - it('quotes a field', function () { - assert_1.strictEqual(stringifier.stringify('VALUE'), '"VALUE"'); - }); - it('does not quote a field of value undefined', function () { - assert_1.strictEqual(stringifier.stringify(), ''); - }); - it('does not quote a field of value null', function () { - assert_1.strictEqual(stringifier.stringify(null), ''); - }); - it('does not quote a field of value empty string', function () { - assert_1.strictEqual(stringifier.stringify(''), ''); - }); - }); - function generateTestCases(fieldDelimiter) { - var delim = delimiter_1.resolveDelimiterChar(fieldDelimiter); - return function () { - var stringifier = field_stringifier_1.createFieldStringifier(fieldDelimiter); - it('returns the same string', function () { - assert_1.strictEqual(stringifier.stringify('VALUE'), 'VALUE'); - }); - it('preserves the whitespace characters', function () { - assert_1.strictEqual(stringifier.stringify(' VALUE\tA '), ' VALUE\tA '); - }); - it("wraps a field value with double quotes if the field contains \"" + delim + "\"", function () { - assert_1.strictEqual(stringifier.stringify("VALUE" + delim + "A"), "\"VALUE" + delim + "A\""); - }); - it('wraps a field value with double quotes if the field contains newline', function () { - assert_1.strictEqual(stringifier.stringify('VALUE\nA'), '"VALUE\nA"'); - }); - it('wraps a field value with double quotes and escape the double quotes if they are used in the field', function () { - assert_1.strictEqual(stringifier.stringify('VALUE"A'), '"VALUE""A"'); - }); - it('escapes double quotes even if double quotes are only on the both edges of the field', function () { - assert_1.strictEqual(stringifier.stringify('"VALUE"'), '"""VALUE"""'); - }); - it('converts a number into a string', function () { - assert_1.strictEqual(stringifier.stringify(1), '1'); - }); - it('converts undefined into an empty string', function () { - assert_1.strictEqual(stringifier.stringify(), ''); - }); - it('converts null into an empty string', function () { - assert_1.strictEqual(stringifier.stringify(null), ''); - }); - it('converts an object into toString-ed value', function () { - var obj = { - name: 'OBJECT_NAME', - toString: function () { return "Name: " + this.name; } - }; - assert_1.strictEqual(stringifier.stringify(obj), 'Name: OBJECT_NAME'); - }); - it("wraps a toString-ed field value with double quote if the value contains \"" + delim + "\"", function () { - var obj = { 
- name: "OBJECT" + delim + "NAME", - toString: function () { return "Name: " + this.name; } - }; - assert_1.strictEqual(stringifier.stringify(obj), "\"Name: OBJECT" + delim + "NAME\""); - }); - it('escapes double quotes in a toString-ed field value if the value has double quotes', function () { - var obj = { - name: 'OBJECT_NAME"', - toString: function () { return "Name: " + this.name; } - }; - assert_1.strictEqual(stringifier.stringify(obj), '"Name: OBJECT_NAME"""'); - }); - }; - } -}); -//# sourceMappingURL=field-stringifier.test.js.map \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/test/field-stringifier.test.js.map b/bin/node_modules/csv-writer/dist/test/field-stringifier.test.js.map deleted file mode 100644 index 9a606db..0000000 --- a/bin/node_modules/csv-writer/dist/test/field-stringifier.test.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"field-stringifier.test.js","sourceRoot":"","sources":["../../src/test/field-stringifier.test.ts"],"names":[],"mappings":";;AAAA,gDAAwD;AACxD,8DAAgE;AAChE,iCAAmC;AAEnC,QAAQ,CAAC,yBAAyB,EAAE;IAEhC,QAAQ,CAAC,+BAA+B,EAAE,iBAAiB,CAAC,GAAG,CAAC,CAAC,CAAC;IAElE,QAAQ,CAAC,mCAAmC,EAAE,iBAAiB,CAAC,GAAG,CAAC,CAAC,CAAC;IAEtE,QAAQ,CAAC,oCAAoC,EAAE;QAC3C,IAAM,WAAW,GAAG,0CAAsB,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QAEtD,EAAE,CAAC,gBAAgB,EAAE;YACjB,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,OAAO,CAAC,EAAE,SAAS,CAAC,CAAC;QAC3D,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,2CAA2C,EAAE;YAC5C,oBAAW,CAAC,WAAW,CAAC,SAAS,EAAE,EAAE,EAAE,CAAC,CAAC;QAC7C,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,sCAAsC,EAAE;YACvC,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,CAAC;QACjD,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,8CAA8C,EAAE;YAC/C,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC;QAC/C,CAAC,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,SAAS,iBAAiB,CAAC,cAAsB;QAC7C,IAAM,KAAK,GAAG,gCAAoB,CAAC,cAAc,CAAC,CAAC;QACnD,OAAO;YACH,IAAM,WAAW,GAAG,0CAAsB,CAAC,cAAc,CAAC,CAAC;YAE3D,EAAE,CAAC,yBAAyB,EAAE;gBAC1B,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,OAAO,CAAC,EAAE,OAAO,CAAC,CAAC;YACzD,CAAC,CAAC,CAAC;YAEH,EAAE,CAAC,qCAAqC,EAAE;gBACtC,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,aAAa,CAAC,EAAE,aAAa,CAAC,CAAC;YACrE,CAAC,CAAC,CAAC;YAEH,EAAE,CAAC,oEAAiE,KAAK,OAAG,EAAE;gBAC1E,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,UAAQ,KAAK,MAAG,CAAC,EAAE,YAAS,KAAK,QAAI,CAAC,CAAC;YAC7E,CAAC,CAAC,CAAC;YAEH,EAAE,CAAC,sEAAsE,EAAE;gBACvE,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,UAAU,CAAC,EAAE,YAAY,CAAC,CAAC;YACjE,CAAC,CAAC,CAAC;YAEH,EAAE,CAAC,mGAAmG,EAAE;gBACpG,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,SAAS,CAAC,EAAE,YAAY,CAAC,CAAC;YAChE,CAAC,CAAC,CAAC;YAEH,EAAE,CAAC,qFAAqF,EAAE;gBACtF,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,SAAS,CAAC,EAAE,aAAa,CAAC,CAAC;YACjE,CAAC,CAAC,CAAC;YAEH,EAAE,CAAC,iCAAiC,EAAE;gBAClC,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC;YAC/C,CAAC,CAAC,CAAC;YAEH,EAAE,CAAC,yCAAyC,EAAE;gBAC1C,oBAAW,CAAC,WAAW,CAAC,SAAS,EAAE,EAAE,EAAE,CAAC,CAAC;YAC7C,CAAC,CAAC,CAAC;YAEH,EAAE,CAAC,oCAAoC,EAAE;gBACrC,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,CAAC;YACjD,CAAC,CAAC,CAAC;YAEH,EAAE,CAAC,2CAA2C,EAAE;gBAC5C,IAAM,GAAG,GAAG;oBACR,IAAI,EAAE,aAAa;oBACnB,QAAQ,EAAE,cAAc,OAAO,WAAS,IAAI,CAAC,IAAM,CAAC,CAAC,CAAC;iBACzD,CAAC;gBACF,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,GAAG,CAAC,EAAE,mBAAmB,CAAC,CAAC;YACjE,CAAC,CAAC,CAAC;YAEH,EAAE,CAAC,+EAA4E,KAAK,OAAG,EAAE;gBACrF,IAAM,GAAG,GAAG;oBACR,IAAI,EAAE,WAAS,KAAK,SAAM;oBAC1B,QAAQ,EAAE,cAAc,OAAO,WAAS,IAAI,CAAC,IAAM,CAAC,CAAC,CAAC;iBACzD,CAAC;gBACF,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,GAAG,CAAC,EAAE,mBAAgB,KAAK,WAAO,CAAC,CAAC;YAC1E,CAAC,CAAC,CAAC;YAEH,EAAE,CAAC,mFAAmF,EAAE;gBACpF,IAAM,GAAG,GAAG;oBACR,IAAI,EAAE,cAAc;oBACpB,QAAQ,EAAE,cAAc,OAAO
,WAAS,IAAI,CAAC,IAAM,CAAC,CAAC,CAAC;iBACzD,CAAC;gBACF,oBAAW,CAAC,WAAW,CAAC,SAAS,CAAC,GAAG,CAAC,EAAE,uBAAuB,CAAC,CAAC;YACrE,CAAC,CAAC,CAAC;QACP,CAAC,CAAC;IACN,CAAC;AACL,CAAC,CAAC,CAAC"} \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/test/helper.js b/bin/node_modules/csv-writer/dist/test/helper.js deleted file mode 100644 index bdc23bb..0000000 --- a/bin/node_modules/csv-writer/dist/test/helper.js +++ /dev/null @@ -1,13 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -var assert_1 = require("assert"); -var fs_1 = require("fs"); -exports.testFilePath = function (id) { return "./test-tmp/" + id + ".csv"; }; -exports.assertFile = function (path, expectedContents, encoding) { - var actualContents = fs_1.readFileSync(path, encoding || 'utf8'); - assert_1.strictEqual(actualContents, expectedContents); -}; -exports.assertRejected = function (p, message) { - return p.then(function () { return new Error('Should not have been called'); }, function (e) { assert_1.strictEqual(e.message, message); }); -}; -//# sourceMappingURL=helper.js.map \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/test/helper.js.map b/bin/node_modules/csv-writer/dist/test/helper.js.map deleted file mode 100644 index a30ab8b..0000000 --- a/bin/node_modules/csv-writer/dist/test/helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"helper.js","sourceRoot":"","sources":["../../src/test/helper.ts"],"names":[],"mappings":";;AAAA,iCAAmC;AACnC,yBAAgC;AAEnB,QAAA,YAAY,GAAG,UAAC,EAAU,IAAK,OAAA,gBAAc,EAAE,SAAM,EAAtB,CAAsB,CAAC;AAEtD,QAAA,UAAU,GAAG,UAAC,IAAY,EAAE,gBAAwB,EAAE,QAAiB;IAChF,IAAM,cAAc,GAAG,iBAAY,CAAC,IAAI,EAAE,QAAQ,IAAI,MAAM,CAAC,CAAC;IAC9D,oBAAW,CAAC,cAAc,EAAE,gBAAgB,CAAC,CAAC;AAClD,CAAC,CAAC;AAEW,QAAA,cAAc,GAAG,UAAC,CAAe,EAAE,OAAe;IAC3D,OAAO,CAAC,CAAC,IAAI,CACT,cAAM,OAAA,IAAI,KAAK,CAAC,6BAA6B,CAAC,EAAxC,CAAwC,EAC9C,UAAC,CAAQ,IAAO,oBAAW,CAAC,CAAC,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,CAAC,CAAC,CACrD,CAAC;AACN,CAAC,CAAC"} \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/test/helper/delimiter.js b/bin/node_modules/csv-writer/dist/test/helper/delimiter.js deleted file mode 100644 index 6b17c8b..0000000 --- a/bin/node_modules/csv-writer/dist/test/helper/delimiter.js +++ /dev/null @@ -1,10 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.resolveDelimiterChar = function (char) { - if (char === ',' || char === ';') - return char; - if (typeof char === 'undefined') - return ','; - throw new Error('Invalid field delimiter'); -}; -//# sourceMappingURL=delimiter.js.map \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/test/helper/delimiter.js.map b/bin/node_modules/csv-writer/dist/test/helper/delimiter.js.map deleted file mode 100644 index a0c18d9..0000000 --- a/bin/node_modules/csv-writer/dist/test/helper/delimiter.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"delimiter.js","sourceRoot":"","sources":["../../../src/test/helper/delimiter.ts"],"names":[],"mappings":";;AACa,QAAA,oBAAoB,GAAG,UAAC,IAAa;IAC9C,IAAI,IAAI,KAAK,GAAG,IAAI,IAAI,KAAK,GAAG;QAAE,OAAO,IAAI,CAAC;IAC9C,IAAI,OAAO,IAAI,KAAK,WAAW;QAAE,OAAO,GAAG,CAAC;IAC5C,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;AAC/C,CAAC,CAAC"} \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/test/lang/promise.test.js b/bin/node_modules/csv-writer/dist/test/lang/promise.test.js deleted file mode 100644 index 046541d..0000000 --- a/bin/node_modules/csv-writer/dist/test/lang/promise.test.js +++ 
/dev/null @@ -1,76 +0,0 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __generator = (this && this.__generator) || function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; - return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (_) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? 
op[1] : void 0, done: true }; - } -}; -Object.defineProperty(exports, "__esModule", { value: true }); -var promise_1 = require("../../lib/lang/promise"); -var assert_1 = require("assert"); -var helper_1 = require("../helper"); -describe('Promise', function () { - var greetAsync = function (name, callback) { - setTimeout(function () { - if (name === 'foo') - callback(null, "Hello, " + name + "!"); - else - callback(new Error("We don't know " + name)); - }, 0); - }; - var promisifiedFn = promise_1.promisify(greetAsync); - it('promisify node style callback', function () { return __awaiter(void 0, void 0, void 0, function () { - var _a; - return __generator(this, function (_b) { - switch (_b.label) { - case 0: - _a = assert_1.strictEqual; - return [4 /*yield*/, promisifiedFn('foo')]; - case 1: - _a.apply(void 0, [_b.sent(), 'Hello, foo!']); - return [2 /*return*/]; - } - }); - }); }); - it('raise an error for error', function () { return __awaiter(void 0, void 0, void 0, function () { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: return [4 /*yield*/, helper_1.assertRejected(promisifiedFn('bar'), "We don't know bar")]; - case 1: - _a.sent(); - return [2 /*return*/]; - } - }); - }); }); -}); -//# sourceMappingURL=promise.test.js.map \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/test/lang/promise.test.js.map b/bin/node_modules/csv-writer/dist/test/lang/promise.test.js.map deleted file mode 100644 index 820bb3c..0000000 --- a/bin/node_modules/csv-writer/dist/test/lang/promise.test.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"promise.test.js","sourceRoot":"","sources":["../../../src/test/lang/promise.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,kDAAiD;AACjD,iCAAmC;AACnC,oCAAyC;AAEzC,QAAQ,CAAC,SAAS,EAAE;IAChB,IAAM,UAAU,GAAG,UAAC,IAAY,EAAE,QAAsD;QACpF,UAAU,CAAC;YACP,IAAI,IAAI,KAAK,KAAK;gBAAE,QAAQ,CAAC,IAAI,EAAE,YAAU,IAAI,MAAG,CAAC,CAAC;;gBACjD,QAAQ,CAAC,IAAI,KAAK,CAAC,mBAAiB,IAAM,CAAC,CAAC,CAAC;QACtD,CAAC,EAAE,CAAC,CAAC,CAAC;IACV,CAAC,CAAC;IACF,IAAM,aAAa,GAAG,mBAAS,CAAC,UAAU,CAAC,CAAC;IAE5C,EAAE,CAAC,+BAA+B,EAAE;;;;;oBAChC,KAAA,oBAAW,CAAA;oBAAC,qBAAM,aAAa,CAAC,KAAK,CAAC,EAAA;;oBAAtC,kBAAY,SAA0B,EAAE,aAAa,EAAC,CAAC;;;;SAC1D,CAAC,CAAC;IAEH,EAAE,CAAC,0BAA0B,EAAE;;;wBAC3B,qBAAM,uBAAc,CAAC,aAAa,CAAC,KAAK,CAAC,EAAE,mBAAmB,CAAC,EAAA;;oBAA/D,SAA+D,CAAC;;;;SACnE,CAAC,CAAC;AACP,CAAC,CAAC,CAAC"} \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/test/write-array-records.test.js b/bin/node_modules/csv-writer/dist/test/write-array-records.test.js deleted file mode 100644 index 6b9d88f..0000000 --- a/bin/node_modules/csv-writer/dist/test/write-array-records.test.js +++ /dev/null @@ -1,209 +0,0 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __generator = (this && this.__generator) || function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; - return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (_) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; - } -}; -Object.defineProperty(exports, "__esModule", { value: true }); -var helper_1 = require("./helper"); -var fs_1 = require("fs"); -var index_1 = require("../index"); -describe('Write array records into CSV', function () { - var makeFilePath = function (id) { return helper_1.testFilePath("array-" + id); }; - var records = [ - ['Bob', 'French'], - ['Mary', 'English'] - ]; - describe('When only path is specified', function () { - var filePath = makeFilePath('minimum'); - var writer; - beforeEach(function () { - writer = index_1.createArrayCsvWriter({ path: filePath }); - }); - it('writes records to a new file', function () { return __awaiter(void 0, void 0, void 0, function () { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: return [4 /*yield*/, writer.writeRecords(records)]; - case 1: - _a.sent(); - helper_1.assertFile(filePath, 'Bob,French\nMary,English\n'); - return [2 /*return*/]; - } - }); - }); }); - it('appends records when requested to write to the same file', function () { return __awaiter(void 0, void 0, void 0, function () { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: return [4 /*yield*/, writer.writeRecords([records[0]])]; - case 1: - _a.sent(); - return [4 /*yield*/, writer.writeRecords([records[1]])]; - case 2: - _a.sent(); - helper_1.assertFile(filePath, 'Bob,French\nMary,English\n'); - return [2 /*return*/]; - } - }); - }); }); - }); - describe('When field header is given', function () { - var filePath = makeFilePath('header'); - var writer; - beforeEach(function () { - writer = index_1.createArrayCsvWriter({ - path: filePath, - header: ['NAME', 'LANGUAGE'] - }); - }); - it('writes a header', function () { return __awaiter(void 0, void 0, void 0, function () { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: return [4 /*yield*/, 
writer.writeRecords(records)]; - case 1: - _a.sent(); - helper_1.assertFile(filePath, 'NAME,LANGUAGE\nBob,French\nMary,English\n'); - return [2 /*return*/]; - } - }); - }); }); - it('appends records without headers', function () { return __awaiter(void 0, void 0, void 0, function () { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: return [4 /*yield*/, writer.writeRecords([records[0]])]; - case 1: - _a.sent(); - return [4 /*yield*/, writer.writeRecords([records[1]])]; - case 2: - _a.sent(); - helper_1.assertFile(filePath, 'NAME,LANGUAGE\nBob,French\nMary,English\n'); - return [2 /*return*/]; - } - }); - }); }); - }); - describe('When `append` flag is specified', function () { - var filePath = makeFilePath('append'); - fs_1.writeFileSync(filePath, 'Mike,German\n', 'utf8'); - var writer = index_1.createArrayCsvWriter({ - path: filePath, - append: true - }); - it('do not overwrite the existing contents and appends records to them', function () { return __awaiter(void 0, void 0, void 0, function () { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: return [4 /*yield*/, writer.writeRecords([records[1]])]; - case 1: - _a.sent(); - helper_1.assertFile(filePath, 'Mike,German\nMary,English\n'); - return [2 /*return*/]; - } - }); - }); }); - }); - describe('When encoding is specified', function () { - var filePath = makeFilePath('encoding'); - var writer = index_1.createArrayCsvWriter({ - path: filePath, - encoding: 'utf16le' - }); - it('writes to a file with the specified encoding', function () { return __awaiter(void 0, void 0, void 0, function () { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: return [4 /*yield*/, writer.writeRecords(records)]; - case 1: - _a.sent(); - helper_1.assertFile(filePath, 'Bob,French\nMary,English\n', 'utf16le'); - return [2 /*return*/]; - } - }); - }); }); - }); - describe('When semicolon is specified as a field delimiter', function () { - var filePath = makeFilePath('field-delimiter'); - var writer = index_1.createArrayCsvWriter({ - path: filePath, - header: ['NAME', 'LANGUAGE'], - fieldDelimiter: ';' - }); - it('uses semicolon instead of comma to separate fields', function () { return __awaiter(void 0, void 0, void 0, function () { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: return [4 /*yield*/, writer.writeRecords(records)]; - case 1: - _a.sent(); - helper_1.assertFile(filePath, 'NAME;LANGUAGE\nBob;French\nMary;English\n'); - return [2 /*return*/]; - } - }); - }); }); - }); - describe('When newline is specified', function () { - var filePath = makeFilePath('newline'); - var writer = index_1.createArrayCsvWriter({ - path: filePath, - recordDelimiter: '\r\n' - }); - it('writes to a file with the specified newline character', function () { return __awaiter(void 0, void 0, void 0, function () { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: return [4 /*yield*/, writer.writeRecords(records)]; - case 1: - _a.sent(); - helper_1.assertFile(filePath, 'Bob,French\r\nMary,English\r\n'); - return [2 /*return*/]; - } - }); - }); }); - }); - describe('When `alwaysQuote` flag is set', function () { - var filePath = makeFilePath('always-quote'); - var writer = index_1.createArrayCsvWriter({ - path: filePath, - header: ['NAME', 'LANGUAGE'], - alwaysQuote: true - }); - it('quotes all fields', function () { return __awaiter(void 0, void 0, void 0, function () { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: return [4 
/*yield*/, writer.writeRecords(records)]; - case 1: - _a.sent(); - helper_1.assertFile(filePath, '"NAME","LANGUAGE"\n"Bob","French"\n"Mary","English"\n'); - return [2 /*return*/]; - } - }); - }); }); - }); -}); -//# sourceMappingURL=write-array-records.test.js.map \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/test/write-array-records.test.js.map b/bin/node_modules/csv-writer/dist/test/write-array-records.test.js.map deleted file mode 100644 index 566e090..0000000 --- a/bin/node_modules/csv-writer/dist/test/write-array-records.test.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"write-array-records.test.js","sourceRoot":"","sources":["../../src/test/write-array-records.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,mCAAkD;AAElD,yBAAiC;AACjC,kCAA8C;AAE9C,QAAQ,CAAC,8BAA8B,EAAE;IAErC,IAAM,YAAY,GAAG,UAAC,EAAU,IAAK,OAAA,qBAAY,CAAC,WAAS,EAAI,CAAC,EAA3B,CAA2B,CAAC;IACjE,IAAM,OAAO,GAAG;QACZ,CAAC,KAAK,EAAE,QAAQ,CAAC;QACjB,CAAC,MAAM,EAAE,SAAS,CAAC;KACtB,CAAC;IAEF,QAAQ,CAAC,6BAA6B,EAAE;QACpC,IAAM,QAAQ,GAAG,YAAY,CAAC,SAAS,CAAC,CAAC;QACzC,IAAI,MAA2B,CAAC;QAEhC,UAAU,CAAC;YACP,MAAM,GAAG,4BAAoB,CAAC,EAAC,IAAI,EAAE,QAAQ,EAAC,CAAC,CAAC;QACpD,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,8BAA8B,EAAE;;;4BAC/B,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,4BAA4B,CAAC,CAAC;;;;aACtD,CAAC,CAAC;QAEH,EAAE,CAAC,0DAA0D,EAAE;;;4BAC3D,qBAAM,MAAM,CAAC,YAAY,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAA;;wBAAvC,SAAuC,CAAC;wBACxC,qBAAM,MAAM,CAAC,YAAY,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAA;;wBAAvC,SAAuC,CAAC;wBACxC,mBAAU,CAAC,QAAQ,EAAE,4BAA4B,CAAC,CAAC;;;;aACtD,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,4BAA4B,EAAE;QACnC,IAAM,QAAQ,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;QACxC,IAAI,MAA2B,CAAC;QAEhC,UAAU,CAAC;YACP,MAAM,GAAG,4BAAoB,CAAC;gBAC1B,IAAI,EAAE,QAAQ;gBACd,MAAM,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC;aAC/B,CAAC,CAAC;QACP,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,iBAAiB,EAAE;;;4BAClB,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,2CAA2C,CAAC,CAAC;;;;aACrE,CAAC,CAAC;QAEH,EAAE,CAAC,iCAAiC,EAAE;;;4BAClC,qBAAM,MAAM,CAAC,YAAY,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAA;;wBAAvC,SAAuC,CAAC;wBACxC,qBAAM,MAAM,CAAC,YAAY,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAA;;wBAAvC,SAAuC,CAAC;wBACxC,mBAAU,CAAC,QAAQ,EAAE,2CAA2C,CAAC,CAAC;;;;aACrE,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,iCAAiC,EAAE;QACxC,IAAM,QAAQ,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;QACxC,kBAAa,CAAC,QAAQ,EAAE,eAAe,EAAE,MAAM,CAAC,CAAC;QACjD,IAAM,MAAM,GAAG,4BAAoB,CAAC;YAChC,IAAI,EAAE,QAAQ;YACd,MAAM,EAAE,IAAI;SACf,CAAC,CAAC;QAEH,EAAE,CAAC,oEAAoE,EAAE;;;4BACrE,qBAAM,MAAM,CAAC,YAAY,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAA;;wBAAvC,SAAuC,CAAC;wBACxC,mBAAU,CAAC,QAAQ,EAAE,6BAA6B,CAAC,CAAC;;;;aACvD,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,4BAA4B,EAAE;QACnC,IAAM,QAAQ,GAAG,YAAY,CAAC,UAAU,CAAC,CAAC;QAC1C,IAAM,MAAM,GAAG,4BAAoB,CAAC;YAChC,IAAI,EAAE,QAAQ;YACd,QAAQ,EAAE,SAAS;SACtB,CAAC,CAAC;QAEH,EAAE,CAAC,8CAA8C,EAAE;;;4BAC/C,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,4BAA4B,EAAE,SAAS,CAAC,CAAC;;;;aACjE,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,kDAAkD,EAAE;QACzD,IAAM,QAAQ,GAAG,YAAY,CAAC,iBAAiB,CAAC,CAAC;QACjD,IAAM,MAAM,GAAG,4BAAoB,CAAC;YAChC,IAAI,EAAE,QAAQ;YACd,MAAM,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC;YAC5B,cAAc,EAAE,GAAG;SACtB,CAAC,CAAC;QAEH,EAAE,CAAC,oDAAoD,EAAE;;;4BACrD,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,2CAA2C,CAAC,CAAC;;;;aACrE,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,2BAA2B,EA
AE;QAClC,IAAM,QAAQ,GAAG,YAAY,CAAC,SAAS,CAAC,CAAC;QACzC,IAAM,MAAM,GAAG,4BAAoB,CAAC;YAChC,IAAI,EAAE,QAAQ;YACd,eAAe,EAAE,MAAM;SAC1B,CAAC,CAAC;QAEH,EAAE,CAAC,uDAAuD,EAAE;;;4BACxD,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,gCAAgC,CAAC,CAAC;;;;aAC1D,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,gCAAgC,EAAE;QACvC,IAAM,QAAQ,GAAG,YAAY,CAAC,cAAc,CAAC,CAAC;QAC9C,IAAM,MAAM,GAAG,4BAAoB,CAAC;YAChC,IAAI,EAAE,QAAQ;YACd,MAAM,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC;YAC5B,WAAW,EAAE,IAAI;SACpB,CAAC,CAAC;QAEH,EAAE,CAAC,mBAAmB,EAAE;;;4BACpB,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,uDAAuD,CAAC,CAAC;;;;aACjF,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;AACP,CAAC,CAAC,CAAC"} \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/test/write-object-records.test.js b/bin/node_modules/csv-writer/dist/test/write-object-records.test.js deleted file mode 100644 index 459eeb5..0000000 --- a/bin/node_modules/csv-writer/dist/test/write-object-records.test.js +++ /dev/null @@ -1,252 +0,0 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __generator = (this && this.__generator) || function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; - return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (_) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? 
op[1] : void 0, done: true }; - } -}; -Object.defineProperty(exports, "__esModule", { value: true }); -var helper_1 = require("./helper"); -var fs_1 = require("fs"); -var index_1 = require("../index"); -describe('Write object records into CSV', function () { - var makeFilePath = function (id) { return helper_1.testFilePath("object-" + id); }; - var records = [ - { name: 'Bob', lang: 'French', address: { country: 'France' } }, - { name: 'Mary', lang: 'English' } - ]; - describe('When only path and header ids are given', function () { - var filePath = makeFilePath('minimum'); - var writer; - beforeEach(function () { - writer = index_1.createObjectCsvWriter({ - path: filePath, - header: ['name', 'lang'] - }); - }); - it('writes records to a new file', function () { return __awaiter(void 0, void 0, void 0, function () { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: return [4 /*yield*/, writer.writeRecords(records)]; - case 1: - _a.sent(); - helper_1.assertFile(filePath, 'Bob,French\nMary,English\n'); - return [2 /*return*/]; - } - }); - }); }); - it('appends records when requested to write to the same file', function () { return __awaiter(void 0, void 0, void 0, function () { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: return [4 /*yield*/, writer.writeRecords([records[0]])]; - case 1: - _a.sent(); - return [4 /*yield*/, writer.writeRecords([records[1]])]; - case 2: - _a.sent(); - helper_1.assertFile(filePath, 'Bob,French\nMary,English\n'); - return [2 /*return*/]; - } - }); - }); }); - }); - describe('When header ids are given with reverse order', function () { - var filePath = makeFilePath('column-order'); - var writer = index_1.createObjectCsvWriter({ - path: filePath, - header: ['lang', 'name'] - }); - it('also writes columns with reverse order', function () { return __awaiter(void 0, void 0, void 0, function () { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: return [4 /*yield*/, writer.writeRecords(records)]; - case 1: - _a.sent(); - helper_1.assertFile(filePath, 'French,Bob\nEnglish,Mary\n'); - return [2 /*return*/]; - } - }); - }); }); - }); - describe('When field header is given with titles', function () { - var filePath = makeFilePath('header'); - var writer; - beforeEach(function () { - writer = index_1.createObjectCsvWriter({ - path: filePath, - header: [{ id: 'name', title: 'NAME' }, { id: 'lang', title: 'LANGUAGE' }] - }); - }); - it('writes a header', function () { return __awaiter(void 0, void 0, void 0, function () { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: return [4 /*yield*/, writer.writeRecords(records)]; - case 1: - _a.sent(); - helper_1.assertFile(filePath, 'NAME,LANGUAGE\nBob,French\nMary,English\n'); - return [2 /*return*/]; - } - }); - }); }); - it('appends records without headers', function () { return __awaiter(void 0, void 0, void 0, function () { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: return [4 /*yield*/, writer.writeRecords([records[0]])]; - case 1: - _a.sent(); - return [4 /*yield*/, writer.writeRecords([records[1]])]; - case 2: - _a.sent(); - helper_1.assertFile(filePath, 'NAME,LANGUAGE\nBob,French\nMary,English\n'); - return [2 /*return*/]; - } - }); - }); }); - }); - describe('When `append` flag is specified', function () { - var filePath = makeFilePath('append'); - fs_1.writeFileSync(filePath, 'Mike,German\n', 'utf8'); - var writer = index_1.createObjectCsvWriter({ - path: filePath, - header: ['name', 
'lang'], - append: true - }); - it('do not overwrite the existing contents and appends records to them', function () { return __awaiter(void 0, void 0, void 0, function () { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: return [4 /*yield*/, writer.writeRecords([records[1]])]; - case 1: - _a.sent(); - helper_1.assertFile(filePath, 'Mike,German\nMary,English\n'); - return [2 /*return*/]; - } - }); - }); }); - }); - describe('When encoding is specified', function () { - var filePath = makeFilePath('encoding'); - var writer = index_1.createObjectCsvWriter({ - path: filePath, - header: ['name', 'lang'], - encoding: 'utf16le' - }); - it('writes to a file with the specified encoding', function () { return __awaiter(void 0, void 0, void 0, function () { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: return [4 /*yield*/, writer.writeRecords(records)]; - case 1: - _a.sent(); - helper_1.assertFile(filePath, 'Bob,French\nMary,English\n', 'utf16le'); - return [2 /*return*/]; - } - }); - }); }); - }); - describe('When semicolon is specified as a field delimiter', function () { - var filePath = makeFilePath('field-delimiter'); - var writer = index_1.createObjectCsvWriter({ - path: filePath, - header: [{ id: 'name', title: 'NAME' }, { id: 'lang', title: 'LANGUAGE' }], - fieldDelimiter: ';' - }); - it('uses semicolon instead of comma to separate fields', function () { return __awaiter(void 0, void 0, void 0, function () { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: return [4 /*yield*/, writer.writeRecords(records)]; - case 1: - _a.sent(); - helper_1.assertFile(filePath, 'NAME;LANGUAGE\nBob;French\nMary;English\n'); - return [2 /*return*/]; - } - }); - }); }); - }); - describe('When newline is specified', function () { - var filePath = makeFilePath('newline'); - var writer = index_1.createObjectCsvWriter({ - path: filePath, - header: ['name', 'lang'], - recordDelimiter: '\r\n' - }); - it('writes to a file with the specified newline character', function () { return __awaiter(void 0, void 0, void 0, function () { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: return [4 /*yield*/, writer.writeRecords(records)]; - case 1: - _a.sent(); - helper_1.assertFile(filePath, 'Bob,French\r\nMary,English\r\n'); - return [2 /*return*/]; - } - }); - }); }); - }); - describe('When `alwaysQuote` flag is set', function () { - var filePath = makeFilePath('always-quote'); - var writer = index_1.createObjectCsvWriter({ - path: filePath, - header: [{ id: 'name', title: 'NAME' }, { id: 'lang', title: 'LANGUAGE' }], - alwaysQuote: true - }); - it('quotes all fields', function () { return __awaiter(void 0, void 0, void 0, function () { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: return [4 /*yield*/, writer.writeRecords(records)]; - case 1: - _a.sent(); - helper_1.assertFile(filePath, '"NAME","LANGUAGE"\n"Bob","French"\n"Mary","English"\n'); - return [2 /*return*/]; - } - }); - }); }); - }); - describe('When `headerIdDelimiter` flag is set', function () { - var filePath = makeFilePath('nested'); - var writer = index_1.createObjectCsvWriter({ - path: filePath, - header: [{ id: 'name', title: 'NAME' }, { id: 'address.country', title: 'COUNTRY' }], - headerIdDelimiter: '.' 
- }); - it('breaks keys into key paths', function () { return __awaiter(void 0, void 0, void 0, function () { - return __generator(this, function (_a) { - switch (_a.label) { - case 0: return [4 /*yield*/, writer.writeRecords(records)]; - case 1: - _a.sent(); - helper_1.assertFile(filePath, 'NAME,COUNTRY\nBob,France\nMary,\n'); - return [2 /*return*/]; - } - }); - }); }); - }); -}); -//# sourceMappingURL=write-object-records.test.js.map \ No newline at end of file diff --git a/bin/node_modules/csv-writer/dist/test/write-object-records.test.js.map b/bin/node_modules/csv-writer/dist/test/write-object-records.test.js.map deleted file mode 100644 index aed39fc..0000000 --- a/bin/node_modules/csv-writer/dist/test/write-object-records.test.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"write-object-records.test.js","sourceRoot":"","sources":["../../src/test/write-object-records.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,mCAAkD;AAElD,yBAAiC;AACjC,kCAA+C;AAG/C,QAAQ,CAAC,+BAA+B,EAAE;IAEtC,IAAM,YAAY,GAAG,UAAC,EAAU,IAAK,OAAA,qBAAY,CAAC,YAAU,EAAI,CAAC,EAA5B,CAA4B,CAAC;IAClE,IAAM,OAAO,GAAG;QACZ,EAAC,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,EAAC,OAAO,EAAE,QAAQ,EAAC,EAAC;QAC3D,EAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,SAAS,EAAC;KAClC,CAAC;IAEF,QAAQ,CAAC,yCAAyC,EAAE;QAChD,IAAM,QAAQ,GAAG,YAAY,CAAC,SAAS,CAAC,CAAC;QACzC,IAAI,MAAiC,CAAC;QAEtC,UAAU,CAAC;YACP,MAAM,GAAG,6BAAqB,CAAC;gBAC3B,IAAI,EAAE,QAAQ;gBACd,MAAM,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC;aAC3B,CAAC,CAAC;QACP,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,8BAA8B,EAAE;;;4BAC/B,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,4BAA4B,CAAC,CAAC;;;;aACtD,CAAC,CAAC;QAEH,EAAE,CAAC,0DAA0D,EAAE;;;4BAC3D,qBAAM,MAAM,CAAC,YAAY,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAA;;wBAAvC,SAAuC,CAAC;wBACxC,qBAAM,MAAM,CAAC,YAAY,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAA;;wBAAvC,SAAuC,CAAC;wBACxC,mBAAU,CAAC,QAAQ,EAAE,4BAA4B,CAAC,CAAC;;;;aACtD,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,8CAA8C,EAAE;QACrD,IAAM,QAAQ,GAAG,YAAY,CAAC,cAAc,CAAC,CAAC;QAC9C,IAAM,MAAM,GAAG,6BAAqB,CAAC;YACjC,IAAI,EAAE,QAAQ;YACd,MAAM,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC;SAC3B,CAAC,CAAC;QAEH,EAAE,CAAC,wCAAwC,EAAE;;;4BACzC,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,4BAA4B,CAAC,CAAC;;;;aACtD,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,wCAAwC,EAAE;QAC/C,IAAM,QAAQ,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;QACxC,IAAI,MAAiC,CAAC;QAEtC,UAAU,CAAC;YACP,MAAM,GAAG,6BAAqB,CAAC;gBAC3B,IAAI,EAAE,QAAQ;gBACd,MAAM,EAAE,CAAC,EAAC,EAAE,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAC,EAAE,EAAC,EAAE,EAAE,MAAM,EAAE,KAAK,EAAE,UAAU,EAAC,CAAC;aACzE,CAAC,CAAC;QACP,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,iBAAiB,EAAE;;;4BAClB,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,2CAA2C,CAAC,CAAC;;;;aACrE,CAAC,CAAC;QAEH,EAAE,CAAC,iCAAiC,EAAE;;;4BAClC,qBAAM,MAAM,CAAC,YAAY,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAA;;wBAAvC,SAAuC,CAAC;wBACxC,qBAAM,MAAM,CAAC,YAAY,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAA;;wBAAvC,SAAuC,CAAC;wBACxC,mBAAU,CAAC,QAAQ,EAAE,2CAA2C,CAAC,CAAC;;;;aACrE,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,iCAAiC,EAAE;QACxC,IAAM,QAAQ,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;QACxC,kBAAa,CAAC,QAAQ,EAAE,eAAe,EAAE,MAAM,CAAC,CAAC;QACjD,IAAM,MAAM,GAAG,6BAAqB,CAAC;YACjC,IAAI,EAAE,QAAQ;YACd,MAAM,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC;YACxB,MAAM,EAAE,IAAI;SACf,CAAC,CAAC;QAEH,EAAE,CAAC,oEAAoE,EAAE;;;4BACrE,qBAAM,MAAM,CAAC,YAAY,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAA;;wBAAvC,SAAuC,CAAC;wBACxC,mBAAU,CAAC,QAAQ,EAAE,6BAA6B,CAAC,CAAC;;;;aACvD,CAAC,
CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,4BAA4B,EAAE;QACnC,IAAM,QAAQ,GAAG,YAAY,CAAC,UAAU,CAAC,CAAC;QAC1C,IAAM,MAAM,GAAG,6BAAqB,CAAC;YACjC,IAAI,EAAE,QAAQ;YACd,MAAM,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC;YACxB,QAAQ,EAAE,SAAS;SACtB,CAAC,CAAC;QAEH,EAAE,CAAC,8CAA8C,EAAE;;;4BAC/C,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,4BAA4B,EAAE,SAAS,CAAC,CAAC;;;;aACjE,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,kDAAkD,EAAE;QACzD,IAAM,QAAQ,GAAG,YAAY,CAAC,iBAAiB,CAAC,CAAC;QACjD,IAAM,MAAM,GAAG,6BAAqB,CAAC;YACjC,IAAI,EAAE,QAAQ;YACd,MAAM,EAAE,CAAC,EAAC,EAAE,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAC,EAAE,EAAC,EAAE,EAAE,MAAM,EAAE,KAAK,EAAE,UAAU,EAAC,CAAC;YACtE,cAAc,EAAE,GAAG;SACtB,CAAC,CAAC;QAEH,EAAE,CAAC,oDAAoD,EAAE;;;4BACrD,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,2CAA2C,CAAC,CAAC;;;;aACrE,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,2BAA2B,EAAE;QAClC,IAAM,QAAQ,GAAG,YAAY,CAAC,SAAS,CAAC,CAAC;QACzC,IAAM,MAAM,GAAG,6BAAqB,CAAC;YACjC,IAAI,EAAE,QAAQ;YACd,MAAM,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC;YACxB,eAAe,EAAE,MAAM;SAC1B,CAAC,CAAC;QAEH,EAAE,CAAC,uDAAuD,EAAE;;;4BACxD,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,gCAAgC,CAAC,CAAC;;;;aAC1D,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,gCAAgC,EAAE;QACvC,IAAM,QAAQ,GAAG,YAAY,CAAC,cAAc,CAAC,CAAC;QAC9C,IAAM,MAAM,GAAG,6BAAqB,CAAC;YACjC,IAAI,EAAE,QAAQ;YACd,MAAM,EAAE,CAAC,EAAC,EAAE,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAC,EAAE,EAAC,EAAE,EAAE,MAAM,EAAE,KAAK,EAAE,UAAU,EAAC,CAAC;YACtE,WAAW,EAAE,IAAI;SACpB,CAAC,CAAC;QAEH,EAAE,CAAC,mBAAmB,EAAE;;;4BACpB,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,uDAAuD,CAAC,CAAC;;;;aACjF,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;IAEH,QAAQ,CAAC,sCAAsC,EAAE;QAC7C,IAAM,QAAQ,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;QACxC,IAAM,MAAM,GAAG,6BAAqB,CAAC;YACjC,IAAI,EAAE,QAAQ;YACd,MAAM,EAAE,CAAC,EAAC,EAAE,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAC,EAAE,EAAC,EAAE,EAAE,iBAAiB,EAAE,KAAK,EAAE,SAAS,EAAC,CAAC;YAChF,iBAAiB,EAAE,GAAG;SACzB,CAAC,CAAC;QAEH,EAAE,CAAC,4BAA4B,EAAE;;;4BAC7B,qBAAM,MAAM,CAAC,YAAY,CAAC,OAAO,CAAC,EAAA;;wBAAlC,SAAkC,CAAC;wBACnC,mBAAU,CAAC,QAAQ,EAAE,mCAAmC,CAAC,CAAC;;;;aAC7D,CAAC,CAAC;IACP,CAAC,CAAC,CAAC;AACP,CAAC,CAAC,CAAC"} \ No newline at end of file diff --git a/bin/node_modules/csv-writer/package.json b/bin/node_modules/csv-writer/package.json deleted file mode 100644 index 28b2f5b..0000000 --- a/bin/node_modules/csv-writer/package.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "name": "csv-writer", - "version": "1.6.0", - "description": "Convert objects/arrays into a CSV string or write them into a CSV file", - "main": "dist/index.js", - "types": "src/index.ts", - "scripts": { - "compile": "tsc -p ./", - "test": "npm run test:unit && npm run test:it", - "pretest:unit": "rm -rf test-tmp && mkdir test-tmp", - "test:unit": "mocha --require ts-node/register --recursive 'src/test/**/*.ts'", - "test:it": "test-integration/test.sh", - "coverage": "nyc npm run test:unit", - "lint": "tslint -p .", - "prepare": "npm run compile" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/ryu1kn/csv-writer.git" - }, - "keywords": [ - "csv", - "writer", - "stringify" - ], - "author": "Ryuichi Inagaki", - "license": "MIT", - "bugs": { - "url": "https://github.com/ryu1kn/csv-writer/issues" - }, - "homepage": "https://github.com/ryu1kn/csv-writer#readme", - "devDependencies": { - "@types/mocha": "^5.2.7", - "@types/node": "^12.12.25", - "codeclimate-test-reporter": "^0.5.1", - "coveralls": "^3.0.9", - "mocha": "^7.0.0", - "nyc": 
"^15.0.0", - "ts-node": "^8.6.2", - "tslint": "^5.20.1", - "typescript": "^3.7.5" - } -} diff --git a/bin/node_modules/csv-writer/src/index.ts b/bin/node_modules/csv-writer/src/index.ts deleted file mode 100644 index 17d713f..0000000 --- a/bin/node_modules/csv-writer/src/index.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { - ArrayCsvStringifierParams, - CsvStringifierFactory, - ObjectCsvStringifierParams -} from './lib/csv-stringifier-factory'; -import {ArrayCsvWriterParams, CsvWriterFactory, ObjectCsvWriterParams} from './lib/csv-writer-factory'; - -const csvStringifierFactory = new CsvStringifierFactory(); -const csvWriterFactory = new CsvWriterFactory(csvStringifierFactory); - -export const createArrayCsvStringifier = (params: ArrayCsvStringifierParams) => - csvStringifierFactory.createArrayCsvStringifier(params); - -export const createObjectCsvStringifier = (params: ObjectCsvStringifierParams) => - csvStringifierFactory.createObjectCsvStringifier(params); - -export const createArrayCsvWriter = (params: ArrayCsvWriterParams) => - csvWriterFactory.createArrayCsvWriter(params); - -export const createObjectCsvWriter = (params: ObjectCsvWriterParams) => - csvWriterFactory.createObjectCsvWriter(params); diff --git a/bin/node_modules/csv-writer/src/lib/csv-stringifier-factory.ts b/bin/node_modules/csv-writer/src/lib/csv-stringifier-factory.ts deleted file mode 100644 index 698e92f..0000000 --- a/bin/node_modules/csv-writer/src/lib/csv-stringifier-factory.ts +++ /dev/null @@ -1,33 +0,0 @@ -import {ArrayCsvStringifier} from './csv-stringifiers/array'; -import {createFieldStringifier} from './field-stringifier'; -import {ObjectCsvStringifier} from './csv-stringifiers/object'; -import {ObjectStringifierHeader} from './record'; - -export interface ArrayCsvStringifierParams { - header?: string[]; - fieldDelimiter?: string; - recordDelimiter?: string; - alwaysQuote?: boolean; -} - -export interface ObjectCsvStringifierParams { - header: ObjectStringifierHeader; - fieldDelimiter?: string; - recordDelimiter?: string; - headerIdDelimiter?: string; - alwaysQuote?: boolean; -} - -export class CsvStringifierFactory { - - createArrayCsvStringifier(params: ArrayCsvStringifierParams) { - const fieldStringifier = createFieldStringifier(params.fieldDelimiter, params.alwaysQuote); - return new ArrayCsvStringifier(fieldStringifier, params.recordDelimiter, params.header); - } - - createObjectCsvStringifier(params: ObjectCsvStringifierParams) { - const fieldStringifier = createFieldStringifier(params.fieldDelimiter, params.alwaysQuote); - return new ObjectCsvStringifier(fieldStringifier, params.header, params.recordDelimiter, params.headerIdDelimiter); - } - -} diff --git a/bin/node_modules/csv-writer/src/lib/csv-stringifiers/abstract.ts b/bin/node_modules/csv-writer/src/lib/csv-stringifiers/abstract.ts deleted file mode 100644 index 5fa27ca..0000000 --- a/bin/node_modules/csv-writer/src/lib/csv-stringifiers/abstract.ts +++ /dev/null @@ -1,43 +0,0 @@ -import {FieldStringifier} from '../field-stringifier'; -import {Field} from '../record'; - -const DEFAULT_RECORD_DELIMITER = '\n'; -const VALID_RECORD_DELIMITERS = [DEFAULT_RECORD_DELIMITER, '\r\n']; - -export abstract class CsvStringifier { - - constructor(private readonly fieldStringifier: FieldStringifier, - private readonly recordDelimiter = DEFAULT_RECORD_DELIMITER) { - _validateRecordDelimiter(recordDelimiter); - } - - getHeaderString(): string | null { - const headerRecord = this.getHeaderRecord(); - return headerRecord ? 
this.joinRecords([this.getCsvLine(headerRecord)]) : null; - } - - stringifyRecords(records: IterableIterator | T[]): string { - const csvLines = Array.from(records, record => this.getCsvLine(this.getRecordAsArray(record))); - return this.joinRecords(csvLines); - } - - protected abstract getRecordAsArray(_record: T): Field[]; - - protected abstract getHeaderRecord(): string[] | null | undefined; - - private getCsvLine(record: Field[]): string { - return record - .map(fieldValue => this.fieldStringifier.stringify(fieldValue)) - .join(this.fieldStringifier.fieldDelimiter); - } - - private joinRecords(records: string[]) { - return records.join(this.recordDelimiter) + this.recordDelimiter; - } -} - -function _validateRecordDelimiter(delimiter: string): void { - if (VALID_RECORD_DELIMITERS.indexOf(delimiter) === -1) { - throw new Error(`Invalid record delimiter \`${delimiter}\` is specified`); - } -} diff --git a/bin/node_modules/csv-writer/src/lib/csv-stringifiers/array.ts b/bin/node_modules/csv-writer/src/lib/csv-stringifiers/array.ts deleted file mode 100644 index 6ba0f16..0000000 --- a/bin/node_modules/csv-writer/src/lib/csv-stringifiers/array.ts +++ /dev/null @@ -1,20 +0,0 @@ -import {CsvStringifier} from './abstract'; -import {FieldStringifier} from '../field-stringifier'; -import {Field} from '../record'; - -export class ArrayCsvStringifier extends CsvStringifier { - - constructor(fieldStringifier: FieldStringifier, - recordDelimiter?: string, - private readonly header?: string[]) { - super(fieldStringifier, recordDelimiter); - } - - protected getHeaderRecord() { - return this.header; - } - - protected getRecordAsArray(record: Field[]): Field[] { - return record; - } -} diff --git a/bin/node_modules/csv-writer/src/lib/csv-stringifiers/object.ts b/bin/node_modules/csv-writer/src/lib/csv-stringifiers/object.ts deleted file mode 100644 index 45b54b7..0000000 --- a/bin/node_modules/csv-writer/src/lib/csv-stringifiers/object.ts +++ /dev/null @@ -1,36 +0,0 @@ -import {CsvStringifier} from './abstract'; -import {FieldStringifier} from '../field-stringifier'; -import {Field, ObjectHeaderItem, ObjectStringifierHeader} from '../record'; -import {isObject, ObjectMap} from '../lang/object'; - -export class ObjectCsvStringifier extends CsvStringifier> { - - constructor(fieldStringifier: FieldStringifier, - private readonly header: ObjectStringifierHeader, - recordDelimiter?: string, - private readonly headerIdDelimiter?: string) { - super(fieldStringifier, recordDelimiter); - } - - protected getHeaderRecord(): string[] | null { - if (!this.isObjectHeader) return null; - return (this.header as ObjectHeaderItem[]).map(field => field.title); - } - - protected getRecordAsArray(record: ObjectMap): Field[] { - return this.fieldIds.map(fieldId => this.getNestedValue(record, fieldId)); - } - - private getNestedValue(obj: ObjectMap, key: string) { - if (!this.headerIdDelimiter) return obj[key]; - return key.split(this.headerIdDelimiter).reduce((subObj, keyPart) => (subObj || {})[keyPart], obj); - } - - private get fieldIds(): string[] { - return this.isObjectHeader ? 
(this.header as ObjectHeaderItem[]).map(column => column.id) : (this.header as string[]); - } - - private get isObjectHeader(): boolean { - return isObject(this.header && this.header[0]); - } -} diff --git a/bin/node_modules/csv-writer/src/lib/csv-writer-factory.ts b/bin/node_modules/csv-writer/src/lib/csv-writer-factory.ts deleted file mode 100644 index 070dd0b..0000000 --- a/bin/node_modules/csv-writer/src/lib/csv-writer-factory.ts +++ /dev/null @@ -1,49 +0,0 @@ -import {CsvWriter} from './csv-writer'; -import {CsvStringifierFactory} from './csv-stringifier-factory'; -import {ObjectStringifierHeader} from './record'; - -export interface ArrayCsvWriterParams { - path: string; - header?: string[]; - fieldDelimiter?: string; - recordDelimiter?: string; - alwaysQuote?: boolean; - encoding?: string; - append?: boolean; -} - -export interface ObjectCsvWriterParams { - path: string; - header: ObjectStringifierHeader; - fieldDelimiter?: string; - recordDelimiter?: string; - headerIdDelimiter?: string; - alwaysQuote?: boolean; - encoding?: string; - append?: boolean; -} - -export class CsvWriterFactory { - constructor(private readonly csvStringifierFactory: CsvStringifierFactory) {} - - createArrayCsvWriter(params: ArrayCsvWriterParams) { - const csvStringifier = this.csvStringifierFactory.createArrayCsvStringifier({ - header: params.header, - fieldDelimiter: params.fieldDelimiter, - recordDelimiter: params.recordDelimiter, - alwaysQuote: params.alwaysQuote - }); - return new CsvWriter(csvStringifier, params.path, params.encoding, params.append); - } - - createObjectCsvWriter(params: ObjectCsvWriterParams) { - const csvStringifier = this.csvStringifierFactory.createObjectCsvStringifier({ - header: params.header, - fieldDelimiter: params.fieldDelimiter, - recordDelimiter: params.recordDelimiter, - headerIdDelimiter: params.headerIdDelimiter, - alwaysQuote: params.alwaysQuote - }); - return new CsvWriter(csvStringifier, params.path, params.encoding, params.append); - } -} diff --git a/bin/node_modules/csv-writer/src/lib/csv-writer.ts b/bin/node_modules/csv-writer/src/lib/csv-writer.ts deleted file mode 100644 index 6fc2047..0000000 --- a/bin/node_modules/csv-writer/src/lib/csv-writer.ts +++ /dev/null @@ -1,27 +0,0 @@ -import {CsvStringifier} from './csv-stringifiers/abstract'; -import {FileWriter} from './file-writer'; - -const DEFAULT_INITIAL_APPEND_FLAG = false; - -export class CsvWriter { - private readonly fileWriter: FileWriter; - - constructor(private readonly csvStringifier: CsvStringifier, - path: string, - encoding?: string, - private append = DEFAULT_INITIAL_APPEND_FLAG) { - this.fileWriter = new FileWriter(path, this.append, encoding); - } - - async writeRecords(records: T[]): Promise { - const recordsString = this.csvStringifier.stringifyRecords(records); - const writeString = this.headerString + recordsString; - await this.fileWriter.write(writeString); - this.append = true; - } - - private get headerString(): string { - const headerString = !this.append && this.csvStringifier.getHeaderString(); - return headerString || ''; - } -} diff --git a/bin/node_modules/csv-writer/src/lib/field-stringifier.ts b/bin/node_modules/csv-writer/src/lib/field-stringifier.ts deleted file mode 100644 index 019057a..0000000 --- a/bin/node_modules/csv-writer/src/lib/field-stringifier.ts +++ /dev/null @@ -1,47 +0,0 @@ -import {Field} from './record'; - -const DEFAULT_FIELD_DELIMITER = ','; -const VALID_FIELD_DELIMITERS = [DEFAULT_FIELD_DELIMITER, ';']; - -export abstract class FieldStringifier { - 
constructor(public readonly fieldDelimiter: string) {} - - abstract stringify(value?: Field): string; - - protected isEmpty(value?: Field): boolean { - return typeof value === 'undefined' || value === null || value === ''; - } - - protected quoteField(field: string): string { - return `"${field.replace(/"/g, '""')}"`; - } -} - -class DefaultFieldStringifier extends FieldStringifier { - stringify(value?: Field): string { - if (this.isEmpty(value)) return ''; - const str = String(value); - return this.needsQuote(str) ? this.quoteField(str) : str; - } - - private needsQuote(str: string): boolean { - return str.includes(this.fieldDelimiter) || str.includes('\n') || str.includes('"'); - } -} - -class ForceQuoteFieldStringifier extends FieldStringifier { - stringify(value?: Field): string { - return this.isEmpty(value) ? '' : this.quoteField(String(value)); - } -} - -export function createFieldStringifier(fieldDelimiter: string = DEFAULT_FIELD_DELIMITER, alwaysQuote = false) { - _validateFieldDelimiter(fieldDelimiter); - return alwaysQuote ? new ForceQuoteFieldStringifier(fieldDelimiter) : new DefaultFieldStringifier(fieldDelimiter); -} - -function _validateFieldDelimiter(delimiter: string): void { - if (VALID_FIELD_DELIMITERS.indexOf(delimiter) === -1) { - throw new Error(`Invalid field delimiter \`${delimiter}\` is specified`); - } -} diff --git a/bin/node_modules/csv-writer/src/lib/file-writer.ts b/bin/node_modules/csv-writer/src/lib/file-writer.ts deleted file mode 100644 index 30ce9dd..0000000 --- a/bin/node_modules/csv-writer/src/lib/file-writer.ts +++ /dev/null @@ -1,26 +0,0 @@ -import {promisify} from './lang/promise'; -import {writeFile} from 'fs'; - -const writeFilePromise = promisify(writeFile); - -const DEFAULT_ENCODING = 'utf8'; - -export class FileWriter { - - constructor(private readonly path: string, - private append: boolean, - private readonly encoding = DEFAULT_ENCODING) { - } - - async write(string: string): Promise { - await writeFilePromise(this.path, string, this.getWriteOption()); - this.append = true; - } - - private getWriteOption() { - return { - encoding: this.encoding, - flag: this.append ? 
'a' : 'w' - }; - } -} diff --git a/bin/node_modules/csv-writer/src/lib/lang/object.ts b/bin/node_modules/csv-writer/src/lib/lang/object.ts deleted file mode 100644 index 0f0f76f..0000000 --- a/bin/node_modules/csv-writer/src/lib/lang/object.ts +++ /dev/null @@ -1,6 +0,0 @@ -export const isObject = (value: any) => - Object.prototype.toString.call(value) === '[object Object]'; - -export interface ObjectMap { - [k: string]: T; -} diff --git a/bin/node_modules/csv-writer/src/lib/lang/promise.ts b/bin/node_modules/csv-writer/src/lib/lang/promise.ts deleted file mode 100644 index 6b53b15..0000000 --- a/bin/node_modules/csv-writer/src/lib/lang/promise.ts +++ /dev/null @@ -1,14 +0,0 @@ - -type NullableError = Error | null; - -export function promisify(fn: (...args: any[]) => void): (...args: any[]) => any { - return (...args: any[]) => { - return new Promise((resolve, reject) => { - const nodeCallback = (err: NullableError, result: any) => { - if (err) reject(err); - else resolve(result); - }; - fn.apply(null, [...args, nodeCallback]); - }); - }; -} diff --git a/bin/node_modules/csv-writer/src/lib/record.ts b/bin/node_modules/csv-writer/src/lib/record.ts deleted file mode 100644 index e757a96..0000000 --- a/bin/node_modules/csv-writer/src/lib/record.ts +++ /dev/null @@ -1,5 +0,0 @@ - -export type Field = any; - -export type ObjectHeaderItem = { id: string; title: string }; -export type ObjectStringifierHeader = ObjectHeaderItem[] | string[]; diff --git a/bin/node_modules/csv-writer/src/test/csv-stringifiers/array.test.ts b/bin/node_modules/csv-writer/src/test/csv-stringifiers/array.test.ts deleted file mode 100644 index 49e4ed2..0000000 --- a/bin/node_modules/csv-writer/src/test/csv-stringifiers/array.test.ts +++ /dev/null @@ -1,100 +0,0 @@ -import {resolveDelimiterChar} from '../helper/delimiter'; -import {createArrayCsvStringifier} from '../../index'; -import {strictEqual, throws} from 'assert'; - -describe('ArrayCsvStringifier', () => { - const records = [ - ['FIELD_A1', 'FIELD_B1'], - ['FIELD_A2', 'FIELD_B2'] - ]; - - describe('When field delimiter is comma', generateTestCases()); - - describe('When field delimiter is semicolon', generateTestCases(';')); - - describe('When field delimiter is neither comma nor semicolon', () => { - it('throws an exception', () => { - throws(() => { - createArrayCsvStringifier({fieldDelimiter: '/'}); - }); - }); - }); - - describe('When record delimiter is neither LF nor CR+LF', () => { - it('throws an exception', () => { - throws(() => { - createArrayCsvStringifier({recordDelimiter: '\r'}); - }); - }); - }); - - describe('When records input is an iterable other than an array', () => { - const stringifier = createArrayCsvStringifier({ - header: ['TITLE_A', 'TITLE_B'] - }); - function * recordGenerator() { - yield records[0]; - yield records[1]; - } - - it('converts the records into CSV', () => { - strictEqual( - stringifier.stringifyRecords(recordGenerator()), - 'FIELD_A1,FIELD_B1\nFIELD_A2,FIELD_B2\n' - ); - }); - }); - - describe('When `alwaysQuote` flag is set', () => { - const stringifier = createArrayCsvStringifier({ - header: ['TITLE_A', 'TITLE_B'], - alwaysQuote: true - }); - - it('quotes all header fields', () => { - strictEqual(stringifier.getHeaderString(), '"TITLE_A","TITLE_B"\n'); - }); - - it('quotes all data fields', () => { - strictEqual(stringifier.stringifyRecords(records), '"FIELD_A1","FIELD_B1"\n"FIELD_A2","FIELD_B2"\n'); - }); - }); - - function generateTestCases(fieldDelimiter?: string) { - const delim = resolveDelimiterChar(fieldDelimiter); 
- return () => { - describe('header is specified as a list of column titles', () => { - const stringifier = createArrayCsvStringifier({ - header: ['TITLE_A', 'TITLE_B'], - fieldDelimiter - }); - - it(`returns a header line with field separated by "${delim}"`, () => { - strictEqual(stringifier.getHeaderString(), `TITLE_A${delim}TITLE_B\n`); - }); - - it(`converts given data records into CSV lines with field separated by "${delim}"`, () => { - strictEqual( - stringifier.stringifyRecords(records), - `FIELD_A1${delim}FIELD_B1\nFIELD_A2${delim}FIELD_B2\n` - ); - }); - }); - - describe('header is not specified', () => { - const stringifier = createArrayCsvStringifier({fieldDelimiter}); - - it('returns null for header line', () => { - strictEqual(stringifier.getHeaderString(), null); - }); - - it(`converts given data records into CSV lines with field separated by "${delim}"`, () => { - strictEqual( - stringifier.stringifyRecords(records), - `FIELD_A1${delim}FIELD_B1\nFIELD_A2${delim}FIELD_B2\n` - ); - }); - }); - }; - } -}); diff --git a/bin/node_modules/csv-writer/src/test/csv-stringifiers/object.test.ts b/bin/node_modules/csv-writer/src/test/csv-stringifiers/object.test.ts deleted file mode 100644 index fc88f28..0000000 --- a/bin/node_modules/csv-writer/src/test/csv-stringifiers/object.test.ts +++ /dev/null @@ -1,150 +0,0 @@ -import {resolveDelimiterChar} from '../helper/delimiter'; -import {createObjectCsvStringifier} from '../../index'; -import {strictEqual, throws} from 'assert'; - -describe('ObjectCsvStringifier', () => { - const records = [ - {FIELD_A: 'VALUE_A1', FIELD_B: 'VALUE_B1'}, - {FIELD_A: 'VALUE_A2', FIELD_B: 'VALUE_B2', OTHERS: {FIELD_C: 'VALUE_C2'}} - ]; - - describe('When field delimiter is comma', generateTestCases()); - - describe('When field delimiter is semicolon', generateTestCases(';')); - - describe('When field delimiter is neither comma nor semicolon', () => { - it('throws an exception', () => { - throws(() => { - createObjectCsvStringifier({ - header: ['FIELD_A', 'FIELD_B'], - fieldDelimiter: '/' - }); - }); - }); - }); - - describe('When record delimiter is neither LF nor CR+LF', () => { - it('throws an exception', () => { - throws(() => { - createObjectCsvStringifier({ - header: ['FIELD_A', 'FIELD_B'], - recordDelimiter: '\r' - }); - }); - }); - }); - - describe('When records input is an iterable other than an array', () => { - const stringifier = createObjectCsvStringifier({ - header: ['FIELD_A', 'FIELD_B'] - }); - function * recordGenerator() { - yield records[0]; - yield records[1]; - } - - it('converts the records into CSV', () => { - strictEqual( - stringifier.stringifyRecords(recordGenerator()), - 'VALUE_A1,VALUE_B1\nVALUE_A2,VALUE_B2\n' - ); - }); - }); - - describe('When `alwaysQuote` flag is set', () => { - const stringifier = createObjectCsvStringifier({ - header: [ - {id: 'FIELD_A', title: 'TITLE_A'}, - {id: 'FIELD_B', title: 'TITLE_B'} - ], - alwaysQuote: true - }); - - it('quotes all header fields', () => { - strictEqual(stringifier.getHeaderString(), '"TITLE_A","TITLE_B"\n'); - }); - - it('quotes all data fields', () => { - strictEqual(stringifier.stringifyRecords(records), '"VALUE_A1","VALUE_B1"\n"VALUE_A2","VALUE_B2"\n'); - }); - }); - - describe('When `headerIdDelimiter` is set', () => { - const stringifier = createObjectCsvStringifier({ - header: [ - {id: 'FIELD_A', title: 'TITLE_A'}, - {id: 'OTHERS/FIELD_C', title: 'TITLE_C'} - ], - headerIdDelimiter: '/' - }); - - it('uses the title as is', () => { - strictEqual(stringifier.getHeaderString(), 
'TITLE_A,TITLE_C\n'); - }); - - it('picks up a value in nested objects', () => { - strictEqual(stringifier.stringifyRecords(records), 'VALUE_A1,\nVALUE_A2,VALUE_C2\n'); - }); - }); - - function generateTestCases(fieldDelimiter?: string) { - const delim = resolveDelimiterChar(fieldDelimiter); - return () => { - describe('header is specified with title', () => { - const stringifier = createObjectCsvStringifier({ - header: [ - {id: 'FIELD_A', title: 'TITLE_A'}, - {id: 'FIELD_B', title: 'TITLE_B'} - ], - fieldDelimiter - }); - - it(`returns a header line with field separated by "${delim}"`, () => { - strictEqual(stringifier.getHeaderString(), `TITLE_A${delim}TITLE_B\n`); - }); - - it(`converts given data records into CSV lines with field separated by "${delim}"`, () => { - strictEqual( - stringifier.stringifyRecords(records), - `VALUE_A1${delim}VALUE_B1\nVALUE_A2${delim}VALUE_B2\n` - ); - }); - }); - - describe('header is specified without title', () => { - const stringifier = createObjectCsvStringifier({ - header: ['FIELD_A', 'FIELD_B'], - fieldDelimiter - }); - - it('returns null for header line', () => { - strictEqual(stringifier.getHeaderString(), null); - }); - - it(`converts given data records into CSV lines with field separated by "${delim}"`, () => { - strictEqual( - stringifier.stringifyRecords(records), - `VALUE_A1${delim}VALUE_B1\nVALUE_A2${delim}VALUE_B2\n` - ); - }); - }); - - describe('header columns are given with reverse order', () => { - const stringifier = createObjectCsvStringifier({ - header: [ - {id: 'FIELD_B', title: 'TITLE_B'}, - {id: 'FIELD_A', title: 'TITLE_A'} - ], - fieldDelimiter - }); - - it(`layouts fields with the order of headers given with field separated by "${delim}"`, () => { - strictEqual( - stringifier.stringifyRecords(records), - `VALUE_B1${delim}VALUE_A1\nVALUE_B2${delim}VALUE_A2\n` - ); - }); - }); - }; - } -}); diff --git a/bin/node_modules/csv-writer/src/test/field-stringifier.test.ts b/bin/node_modules/csv-writer/src/test/field-stringifier.test.ts deleted file mode 100644 index 93dbbea..0000000 --- a/bin/node_modules/csv-writer/src/test/field-stringifier.test.ts +++ /dev/null @@ -1,97 +0,0 @@ -import {resolveDelimiterChar} from './helper/delimiter'; -import {createFieldStringifier} from '../lib/field-stringifier'; -import {strictEqual} from 'assert'; - -describe('DefaultFieldStringifier', () => { - - describe('When field delimiter is comma', generateTestCases(',')); - - describe('When field delimiter is semicolon', generateTestCases(';')); - - describe('When all fields needs to be quoted', () => { - const stringifier = createFieldStringifier(',', true); - - it('quotes a field', () => { - strictEqual(stringifier.stringify('VALUE'), '"VALUE"'); - }); - - it('does not quote a field of value undefined', () => { - strictEqual(stringifier.stringify(), ''); - }); - - it('does not quote a field of value null', () => { - strictEqual(stringifier.stringify(null), ''); - }); - - it('does not quote a field of value empty string', () => { - strictEqual(stringifier.stringify(''), ''); - }); - }); - - function generateTestCases(fieldDelimiter: string) { - const delim = resolveDelimiterChar(fieldDelimiter); - return () => { - const stringifier = createFieldStringifier(fieldDelimiter); - - it('returns the same string', () => { - strictEqual(stringifier.stringify('VALUE'), 'VALUE'); - }); - - it('preserves the whitespace characters', () => { - strictEqual(stringifier.stringify(' VALUE\tA '), ' VALUE\tA '); - }); - - it(`wraps a field value with double quotes if the 
field contains "${delim}"`, () => { - strictEqual(stringifier.stringify(`VALUE${delim}A`), `"VALUE${delim}A"`); - }); - - it('wraps a field value with double quotes if the field contains newline', () => { - strictEqual(stringifier.stringify('VALUE\nA'), '"VALUE\nA"'); - }); - - it('wraps a field value with double quotes and escape the double quotes if they are used in the field', () => { - strictEqual(stringifier.stringify('VALUE"A'), '"VALUE""A"'); - }); - - it('escapes double quotes even if double quotes are only on the both edges of the field', () => { - strictEqual(stringifier.stringify('"VALUE"'), '"""VALUE"""'); - }); - - it('converts a number into a string', () => { - strictEqual(stringifier.stringify(1), '1'); - }); - - it('converts undefined into an empty string', () => { - strictEqual(stringifier.stringify(), ''); - }); - - it('converts null into an empty string', () => { - strictEqual(stringifier.stringify(null), ''); - }); - - it('converts an object into toString-ed value', () => { - const obj = { - name: 'OBJECT_NAME', - toString: function () { return `Name: ${this.name}`; } - }; - strictEqual(stringifier.stringify(obj), 'Name: OBJECT_NAME'); - }); - - it(`wraps a toString-ed field value with double quote if the value contains "${delim}"`, () => { - const obj = { - name: `OBJECT${delim}NAME`, - toString: function () { return `Name: ${this.name}`; } - }; - strictEqual(stringifier.stringify(obj), `"Name: OBJECT${delim}NAME"`); - }); - - it('escapes double quotes in a toString-ed field value if the value has double quotes', () => { - const obj = { - name: 'OBJECT_NAME"', - toString: function () { return `Name: ${this.name}`; } - }; - strictEqual(stringifier.stringify(obj), '"Name: OBJECT_NAME"""'); - }); - }; - } -}); diff --git a/bin/node_modules/csv-writer/src/test/helper.ts b/bin/node_modules/csv-writer/src/test/helper.ts deleted file mode 100644 index e08c4b7..0000000 --- a/bin/node_modules/csv-writer/src/test/helper.ts +++ /dev/null @@ -1,16 +0,0 @@ -import {strictEqual} from 'assert'; -import {readFileSync} from 'fs'; - -export const testFilePath = (id: string) => `./test-tmp/${id}.csv`; - -export const assertFile = (path: string, expectedContents: string, encoding?: string) => { - const actualContents = readFileSync(path, encoding || 'utf8'); - strictEqual(actualContents, expectedContents); -}; - -export const assertRejected = (p: Promise, message: string) => { - return p.then( - () => new Error('Should not have been called'), - (e: Error) => { strictEqual(e.message, message); } - ); -}; diff --git a/bin/node_modules/csv-writer/src/test/helper/delimiter.ts b/bin/node_modules/csv-writer/src/test/helper/delimiter.ts deleted file mode 100644 index 75ca063..0000000 --- a/bin/node_modules/csv-writer/src/test/helper/delimiter.ts +++ /dev/null @@ -1,6 +0,0 @@ - -export const resolveDelimiterChar = (char?: string) => { - if (char === ',' || char === ';') return char; - if (typeof char === 'undefined') return ','; - throw new Error('Invalid field delimiter'); -}; diff --git a/bin/node_modules/csv-writer/src/test/lang/promise.test.ts b/bin/node_modules/csv-writer/src/test/lang/promise.test.ts deleted file mode 100644 index 6782f1b..0000000 --- a/bin/node_modules/csv-writer/src/test/lang/promise.test.ts +++ /dev/null @@ -1,21 +0,0 @@ -import {promisify} from '../../lib/lang/promise'; -import {strictEqual} from 'assert'; -import {assertRejected} from '../helper'; - -describe('Promise', () => { - const greetAsync = (name: string, callback: (err: Error | null, result?: string) => void) 
=> { - setTimeout(() => { - if (name === 'foo') callback(null, `Hello, ${name}!`); - else callback(new Error(`We don't know ${name}`)); - }, 0); - }; - const promisifiedFn = promisify(greetAsync); - - it('promisify node style callback', async () => { - strictEqual(await promisifiedFn('foo'), 'Hello, foo!'); - }); - - it('raise an error for error', async () => { - await assertRejected(promisifiedFn('bar'), "We don't know bar"); - }); -}); diff --git a/bin/node_modules/csv-writer/src/test/write-array-records.test.ts b/bin/node_modules/csv-writer/src/test/write-array-records.test.ts deleted file mode 100644 index 9ae0a75..0000000 --- a/bin/node_modules/csv-writer/src/test/write-array-records.test.ts +++ /dev/null @@ -1,124 +0,0 @@ -import {assertFile, testFilePath} from './helper'; -import {CsvWriter} from '../lib/csv-writer'; -import {writeFileSync} from 'fs'; -import {createArrayCsvWriter} from '../index'; - -describe('Write array records into CSV', () => { - - const makeFilePath = (id: string) => testFilePath(`array-${id}`); - const records = [ - ['Bob', 'French'], - ['Mary', 'English'] - ]; - - describe('When only path is specified', () => { - const filePath = makeFilePath('minimum'); - let writer: CsvWriter; - - beforeEach(() => { - writer = createArrayCsvWriter({path: filePath}); - }); - - it('writes records to a new file', async () => { - await writer.writeRecords(records); - assertFile(filePath, 'Bob,French\nMary,English\n'); - }); - - it('appends records when requested to write to the same file', async () => { - await writer.writeRecords([records[0]]); - await writer.writeRecords([records[1]]); - assertFile(filePath, 'Bob,French\nMary,English\n'); - }); - }); - - describe('When field header is given', () => { - const filePath = makeFilePath('header'); - let writer: CsvWriter; - - beforeEach(() => { - writer = createArrayCsvWriter({ - path: filePath, - header: ['NAME', 'LANGUAGE'] - }); - }); - - it('writes a header', async () => { - await writer.writeRecords(records); - assertFile(filePath, 'NAME,LANGUAGE\nBob,French\nMary,English\n'); - }); - - it('appends records without headers', async () => { - await writer.writeRecords([records[0]]); - await writer.writeRecords([records[1]]); - assertFile(filePath, 'NAME,LANGUAGE\nBob,French\nMary,English\n'); - }); - }); - - describe('When `append` flag is specified', () => { - const filePath = makeFilePath('append'); - writeFileSync(filePath, 'Mike,German\n', 'utf8'); - const writer = createArrayCsvWriter({ - path: filePath, - append: true - }); - - it('do not overwrite the existing contents and appends records to them', async () => { - await writer.writeRecords([records[1]]); - assertFile(filePath, 'Mike,German\nMary,English\n'); - }); - }); - - describe('When encoding is specified', () => { - const filePath = makeFilePath('encoding'); - const writer = createArrayCsvWriter({ - path: filePath, - encoding: 'utf16le' - }); - - it('writes to a file with the specified encoding', async () => { - await writer.writeRecords(records); - assertFile(filePath, 'Bob,French\nMary,English\n', 'utf16le'); - }); - }); - - describe('When semicolon is specified as a field delimiter', () => { - const filePath = makeFilePath('field-delimiter'); - const writer = createArrayCsvWriter({ - path: filePath, - header: ['NAME', 'LANGUAGE'], - fieldDelimiter: ';' - }); - - it('uses semicolon instead of comma to separate fields', async () => { - await writer.writeRecords(records); - assertFile(filePath, 'NAME;LANGUAGE\nBob;French\nMary;English\n'); - }); - }); - - 
describe('When newline is specified', () => { - const filePath = makeFilePath('newline'); - const writer = createArrayCsvWriter({ - path: filePath, - recordDelimiter: '\r\n' - }); - - it('writes to a file with the specified newline character', async () => { - await writer.writeRecords(records); - assertFile(filePath, 'Bob,French\r\nMary,English\r\n'); - }); - }); - - describe('When `alwaysQuote` flag is set', () => { - const filePath = makeFilePath('always-quote'); - const writer = createArrayCsvWriter({ - path: filePath, - header: ['NAME', 'LANGUAGE'], - alwaysQuote: true - }); - - it('quotes all fields', async () => { - await writer.writeRecords(records); - assertFile(filePath, '"NAME","LANGUAGE"\n"Bob","French"\n"Mary","English"\n'); - }); - }); -}); diff --git a/bin/node_modules/csv-writer/src/test/write-object-records.test.ts b/bin/node_modules/csv-writer/src/test/write-object-records.test.ts deleted file mode 100644 index f94f02f..0000000 --- a/bin/node_modules/csv-writer/src/test/write-object-records.test.ts +++ /dev/null @@ -1,158 +0,0 @@ -import {assertFile, testFilePath} from './helper'; -import {CsvWriter} from '../lib/csv-writer'; -import {writeFileSync} from 'fs'; -import {createObjectCsvWriter} from '../index'; -import {ObjectMap} from '../lib/lang/object'; - -describe('Write object records into CSV', () => { - - const makeFilePath = (id: string) => testFilePath(`object-${id}`); - const records = [ - {name: 'Bob', lang: 'French', address: {country: 'France'}}, - {name: 'Mary', lang: 'English'} - ]; - - describe('When only path and header ids are given', () => { - const filePath = makeFilePath('minimum'); - let writer: CsvWriter>; - - beforeEach(() => { - writer = createObjectCsvWriter({ - path: filePath, - header: ['name', 'lang'] - }); - }); - - it('writes records to a new file', async () => { - await writer.writeRecords(records); - assertFile(filePath, 'Bob,French\nMary,English\n'); - }); - - it('appends records when requested to write to the same file', async () => { - await writer.writeRecords([records[0]]); - await writer.writeRecords([records[1]]); - assertFile(filePath, 'Bob,French\nMary,English\n'); - }); - }); - - describe('When header ids are given with reverse order', () => { - const filePath = makeFilePath('column-order'); - const writer = createObjectCsvWriter({ - path: filePath, - header: ['lang', 'name'] - }); - - it('also writes columns with reverse order', async () => { - await writer.writeRecords(records); - assertFile(filePath, 'French,Bob\nEnglish,Mary\n'); - }); - }); - - describe('When field header is given with titles', () => { - const filePath = makeFilePath('header'); - let writer: CsvWriter>; - - beforeEach(() => { - writer = createObjectCsvWriter({ - path: filePath, - header: [{id: 'name', title: 'NAME'}, {id: 'lang', title: 'LANGUAGE'}] - }); - }); - - it('writes a header', async () => { - await writer.writeRecords(records); - assertFile(filePath, 'NAME,LANGUAGE\nBob,French\nMary,English\n'); - }); - - it('appends records without headers', async () => { - await writer.writeRecords([records[0]]); - await writer.writeRecords([records[1]]); - assertFile(filePath, 'NAME,LANGUAGE\nBob,French\nMary,English\n'); - }); - }); - - describe('When `append` flag is specified', () => { - const filePath = makeFilePath('append'); - writeFileSync(filePath, 'Mike,German\n', 'utf8'); - const writer = createObjectCsvWriter({ - path: filePath, - header: ['name', 'lang'], - append: true - }); - - it('do not overwrite the existing contents and appends records to them', 
async () => { - await writer.writeRecords([records[1]]); - assertFile(filePath, 'Mike,German\nMary,English\n'); - }); - }); - - describe('When encoding is specified', () => { - const filePath = makeFilePath('encoding'); - const writer = createObjectCsvWriter({ - path: filePath, - header: ['name', 'lang'], - encoding: 'utf16le' - }); - - it('writes to a file with the specified encoding', async () => { - await writer.writeRecords(records); - assertFile(filePath, 'Bob,French\nMary,English\n', 'utf16le'); - }); - }); - - describe('When semicolon is specified as a field delimiter', () => { - const filePath = makeFilePath('field-delimiter'); - const writer = createObjectCsvWriter({ - path: filePath, - header: [{id: 'name', title: 'NAME'}, {id: 'lang', title: 'LANGUAGE'}], - fieldDelimiter: ';' - }); - - it('uses semicolon instead of comma to separate fields', async () => { - await writer.writeRecords(records); - assertFile(filePath, 'NAME;LANGUAGE\nBob;French\nMary;English\n'); - }); - }); - - describe('When newline is specified', () => { - const filePath = makeFilePath('newline'); - const writer = createObjectCsvWriter({ - path: filePath, - header: ['name', 'lang'], - recordDelimiter: '\r\n' - }); - - it('writes to a file with the specified newline character', async () => { - await writer.writeRecords(records); - assertFile(filePath, 'Bob,French\r\nMary,English\r\n'); - }); - }); - - describe('When `alwaysQuote` flag is set', () => { - const filePath = makeFilePath('always-quote'); - const writer = createObjectCsvWriter({ - path: filePath, - header: [{id: 'name', title: 'NAME'}, {id: 'lang', title: 'LANGUAGE'}], - alwaysQuote: true - }); - - it('quotes all fields', async () => { - await writer.writeRecords(records); - assertFile(filePath, '"NAME","LANGUAGE"\n"Bob","French"\n"Mary","English"\n'); - }); - }); - - describe('When `headerIdDelimiter` flag is set', () => { - const filePath = makeFilePath('nested'); - const writer = createObjectCsvWriter({ - path: filePath, - header: [{id: 'name', title: 'NAME'}, {id: 'address.country', title: 'COUNTRY'}], - headerIdDelimiter: '.' - }); - - it('breaks keys into key paths', async () => { - await writer.writeRecords(records); - assertFile(filePath, 'NAME,COUNTRY\nBob,France\nMary,\n'); - }); - }); -}); diff --git a/bin/node_modules/fs/README.md b/bin/node_modules/fs/README.md deleted file mode 100644 index 5e9a74c..0000000 --- a/bin/node_modules/fs/README.md +++ /dev/null @@ -1,9 +0,0 @@ -# Security holding package - -This package name is not currently in use, but was formerly occupied -by another package. To avoid malicious use, npm is hanging on to the -package name, but loosely, and we'll probably give it to you if you -want it. - -You may adopt this package by contacting support@npmjs.com and -requesting the name. 
diff --git a/bin/node_modules/fs/package.json b/bin/node_modules/fs/package.json deleted file mode 100644 index 11661b0..0000000 --- a/bin/node_modules/fs/package.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "name": "fs", - "version": "0.0.1-security", - "description": "", - "main": "index.js", - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/npm/security-holder.git" - }, - "keywords": [], - "author": "", - "license": "ISC", - "bugs": { - "url": "https://github.com/npm/security-holder/issues" - }, - "homepage": "https://github.com/npm/security-holder#readme" -} diff --git a/bin/node_modules/inherits/LICENSE b/bin/node_modules/inherits/LICENSE deleted file mode 100644 index dea3013..0000000 --- a/bin/node_modules/inherits/LICENSE +++ /dev/null @@ -1,16 +0,0 @@ -The ISC License - -Copyright (c) Isaac Z. Schlueter - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR -OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. - diff --git a/bin/node_modules/inherits/README.md b/bin/node_modules/inherits/README.md deleted file mode 100644 index b1c5665..0000000 --- a/bin/node_modules/inherits/README.md +++ /dev/null @@ -1,42 +0,0 @@ -Browser-friendly inheritance fully compatible with standard node.js -[inherits](http://nodejs.org/api/util.html#util_util_inherits_constructor_superconstructor). - -This package exports standard `inherits` from node.js `util` module in -node environment, but also provides alternative browser-friendly -implementation through [browser -field](https://gist.github.com/shtylman/4339901). Alternative -implementation is a literal copy of standard one located in standalone -module to avoid requiring of `util`. It also has a shim for old -browsers with no `Object.create` support. - -While keeping you sure you are using standard `inherits` -implementation in node.js environment, it allows bundlers such as -[browserify](https://github.com/substack/node-browserify) to not -include full `util` package to your client code if all you need is -just `inherits` function. It worth, because browser shim for `util` -package is large and `inherits` is often the single function you need -from it. - -It's recommended to use this package instead of -`require('util').inherits` for any code that has chances to be used -not only in node.js but in browser too. - -## usage - -```js -var inherits = require('inherits'); -// then use exactly as the standard one -``` - -## note on version ~1.0 - -Version ~1.0 had completely different motivation and is not compatible -neither with 2.0 nor with standard node.js `inherits`. 
- -If you are using version ~1.0 and planning to switch to ~2.0, be -careful: - -* new version uses `super_` instead of `super` for referencing - superclass -* new version overwrites current prototype while old one preserves any - existing fields on it diff --git a/bin/node_modules/inherits/inherits.js b/bin/node_modules/inherits/inherits.js deleted file mode 100644 index 3b94763..0000000 --- a/bin/node_modules/inherits/inherits.js +++ /dev/null @@ -1,7 +0,0 @@ -try { - var util = require('util'); - if (typeof util.inherits !== 'function') throw ''; - module.exports = util.inherits; -} catch (e) { - module.exports = require('./inherits_browser.js'); -} diff --git a/bin/node_modules/inherits/inherits_browser.js b/bin/node_modules/inherits/inherits_browser.js deleted file mode 100644 index c1e78a7..0000000 --- a/bin/node_modules/inherits/inherits_browser.js +++ /dev/null @@ -1,23 +0,0 @@ -if (typeof Object.create === 'function') { - // implementation from standard node.js 'util' module - module.exports = function inherits(ctor, superCtor) { - ctor.super_ = superCtor - ctor.prototype = Object.create(superCtor.prototype, { - constructor: { - value: ctor, - enumerable: false, - writable: true, - configurable: true - } - }); - }; -} else { - // old school shim for old browsers - module.exports = function inherits(ctor, superCtor) { - ctor.super_ = superCtor - var TempCtor = function () {} - TempCtor.prototype = superCtor.prototype - ctor.prototype = new TempCtor() - ctor.prototype.constructor = ctor - } -} diff --git a/bin/node_modules/inherits/package.json b/bin/node_modules/inherits/package.json deleted file mode 100644 index 7cf62b9..0000000 --- a/bin/node_modules/inherits/package.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "name": "inherits", - "description": "Browser-friendly inheritance fully compatible with standard node.js inherits()", - "version": "2.0.3", - "keywords": [ - "inheritance", - "class", - "klass", - "oop", - "object-oriented", - "inherits", - "browser", - "browserify" - ], - "main": "./inherits.js", - "browser": "./inherits_browser.js", - "repository": "git://github.com/isaacs/inherits", - "license": "ISC", - "scripts": { - "test": "node test" - }, - "devDependencies": { - "tap": "^7.1.0" - }, - "files": [ - "inherits.js", - "inherits_browser.js" - ] -} diff --git a/bin/node_modules/path/.npmignore b/bin/node_modules/path/.npmignore deleted file mode 100644 index b512c09..0000000 --- a/bin/node_modules/path/.npmignore +++ /dev/null @@ -1 +0,0 @@ -node_modules \ No newline at end of file diff --git a/bin/node_modules/path/LICENSE b/bin/node_modules/path/LICENSE deleted file mode 100644 index a7e984d..0000000 --- a/bin/node_modules/path/LICENSE +++ /dev/null @@ -1,18 +0,0 @@ -Copyright Joyent, Inc. and other Node contributors. All rights reserved. -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. diff --git a/bin/node_modules/path/README.md b/bin/node_modules/path/README.md deleted file mode 100644 index 6e7d668..0000000 --- a/bin/node_modules/path/README.md +++ /dev/null @@ -1,15 +0,0 @@ -# path - -This is an exact copy of the NodeJS ’path’ module published to the NPM registry. - -[Documentation](http://nodejs.org/docs/latest/api/path.html) - -## Install - -```sh -$ npm install --save path -``` - -## License - -MIT diff --git a/bin/node_modules/path/package.json b/bin/node_modules/path/package.json deleted file mode 100644 index 16109e3..0000000 --- a/bin/node_modules/path/package.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "author": { - "name": "Joyent", - "url": "http://www.joyent.com" - }, - "name": "path", - "description": "Node.JS path module", - "keywords": [ - "ender", - "path" - ], - "license": "MIT", - "version": "0.12.7", - "homepage": "http://nodejs.org/docs/latest/api/path.html", - "repository": { - "type": "git", - "url": "git://github.com/jinder/path.git" - }, - "main": "./path.js", - "dependencies": { - "process": "^0.11.1", - "util": "^0.10.3" - } -} diff --git a/bin/node_modules/path/path.js b/bin/node_modules/path/path.js deleted file mode 100644 index 87b8ee4..0000000 --- a/bin/node_modules/path/path.js +++ /dev/null @@ -1,628 +0,0 @@ -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -'use strict'; - - -var isWindows = process.platform === 'win32'; -var util = require('util'); - - -// resolves . and .. 
elements in a path array with directory names there -// must be no slashes or device names (c:\) in the array -// (so also no leading and trailing slashes - it does not distinguish -// relative and absolute paths) -function normalizeArray(parts, allowAboveRoot) { - var res = []; - for (var i = 0; i < parts.length; i++) { - var p = parts[i]; - - // ignore empty parts - if (!p || p === '.') - continue; - - if (p === '..') { - if (res.length && res[res.length - 1] !== '..') { - res.pop(); - } else if (allowAboveRoot) { - res.push('..'); - } - } else { - res.push(p); - } - } - - return res; -} - -// returns an array with empty elements removed from either end of the input -// array or the original array if no elements need to be removed -function trimArray(arr) { - var lastIndex = arr.length - 1; - var start = 0; - for (; start <= lastIndex; start++) { - if (arr[start]) - break; - } - - var end = lastIndex; - for (; end >= 0; end--) { - if (arr[end]) - break; - } - - if (start === 0 && end === lastIndex) - return arr; - if (start > end) - return []; - return arr.slice(start, end + 1); -} - -// Regex to split a windows path into three parts: [*, device, slash, -// tail] windows-only -var splitDeviceRe = - /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; - -// Regex to split the tail part of the above into [*, dir, basename, ext] -var splitTailRe = - /^([\s\S]*?)((?:\.{1,2}|[^\\\/]+?|)(\.[^.\/\\]*|))(?:[\\\/]*)$/; - -var win32 = {}; - -// Function to split a filename into [root, dir, basename, ext] -function win32SplitPath(filename) { - // Separate device+slash from tail - var result = splitDeviceRe.exec(filename), - device = (result[1] || '') + (result[2] || ''), - tail = result[3] || ''; - // Split the tail into dir, basename and extension - var result2 = splitTailRe.exec(tail), - dir = result2[1], - basename = result2[2], - ext = result2[3]; - return [device, dir, basename, ext]; -} - -function win32StatPath(path) { - var result = splitDeviceRe.exec(path), - device = result[1] || '', - isUnc = !!device && device[1] !== ':'; - return { - device: device, - isUnc: isUnc, - isAbsolute: isUnc || !!result[2], // UNC paths are always absolute - tail: result[3] - }; -} - -function normalizeUNCRoot(device) { - return '\\\\' + device.replace(/^[\\\/]+/, '').replace(/[\\\/]+/g, '\\'); -} - -// path.resolve([from ...], to) -win32.resolve = function() { - var resolvedDevice = '', - resolvedTail = '', - resolvedAbsolute = false; - - for (var i = arguments.length - 1; i >= -1; i--) { - var path; - if (i >= 0) { - path = arguments[i]; - } else if (!resolvedDevice) { - path = process.cwd(); - } else { - // Windows has the concept of drive-specific current working - // directories. If we've resolved a drive letter but not yet an - // absolute path, get cwd for that drive. We're sure the device is not - // an unc path at this points, because unc paths are always absolute. - path = process.env['=' + resolvedDevice]; - // Verify that a drive-local cwd was found and that it actually points - // to our drive. If not, default to the drive's root. 
- if (!path || path.substr(0, 3).toLowerCase() !== - resolvedDevice.toLowerCase() + '\\') { - path = resolvedDevice + '\\'; - } - } - - // Skip empty and invalid entries - if (!util.isString(path)) { - throw new TypeError('Arguments to path.resolve must be strings'); - } else if (!path) { - continue; - } - - var result = win32StatPath(path), - device = result.device, - isUnc = result.isUnc, - isAbsolute = result.isAbsolute, - tail = result.tail; - - if (device && - resolvedDevice && - device.toLowerCase() !== resolvedDevice.toLowerCase()) { - // This path points to another device so it is not applicable - continue; - } - - if (!resolvedDevice) { - resolvedDevice = device; - } - if (!resolvedAbsolute) { - resolvedTail = tail + '\\' + resolvedTail; - resolvedAbsolute = isAbsolute; - } - - if (resolvedDevice && resolvedAbsolute) { - break; - } - } - - // Convert slashes to backslashes when `resolvedDevice` points to an UNC - // root. Also squash multiple slashes into a single one where appropriate. - if (isUnc) { - resolvedDevice = normalizeUNCRoot(resolvedDevice); - } - - // At this point the path should be resolved to a full absolute path, - // but handle relative paths to be safe (might happen when process.cwd() - // fails) - - // Normalize the tail path - resolvedTail = normalizeArray(resolvedTail.split(/[\\\/]+/), - !resolvedAbsolute).join('\\'); - - return (resolvedDevice + (resolvedAbsolute ? '\\' : '') + resolvedTail) || - '.'; -}; - - -win32.normalize = function(path) { - var result = win32StatPath(path), - device = result.device, - isUnc = result.isUnc, - isAbsolute = result.isAbsolute, - tail = result.tail, - trailingSlash = /[\\\/]$/.test(tail); - - // Normalize the tail path - tail = normalizeArray(tail.split(/[\\\/]+/), !isAbsolute).join('\\'); - - if (!tail && !isAbsolute) { - tail = '.'; - } - if (tail && trailingSlash) { - tail += '\\'; - } - - // Convert slashes to backslashes when `device` points to an UNC root. - // Also squash multiple slashes into a single one where appropriate. - if (isUnc) { - device = normalizeUNCRoot(device); - } - - return device + (isAbsolute ? '\\' : '') + tail; -}; - - -win32.isAbsolute = function(path) { - return win32StatPath(path).isAbsolute; -}; - -win32.join = function() { - var paths = []; - for (var i = 0; i < arguments.length; i++) { - var arg = arguments[i]; - if (!util.isString(arg)) { - throw new TypeError('Arguments to path.join must be strings'); - } - if (arg) { - paths.push(arg); - } - } - - var joined = paths.join('\\'); - - // Make sure that the joined path doesn't start with two slashes, because - // normalize() will mistake it for an UNC path then. - // - // This step is skipped when it is very clear that the user actually - // intended to point at an UNC path. This is assumed when the first - // non-empty string arguments starts with exactly two slashes followed by - // at least one more non-slash character. - // - // Note that for normalize() to treat a path as an UNC path it needs to - // have at least 2 components, so we don't filter for that here. 
- // This means that the user can use join to construct UNC paths from - // a server name and a share name; for example: - // path.join('//server', 'share') -> '\\\\server\\share\') - if (!/^[\\\/]{2}[^\\\/]/.test(paths[0])) { - joined = joined.replace(/^[\\\/]{2,}/, '\\'); - } - - return win32.normalize(joined); -}; - - -// path.relative(from, to) -// it will solve the relative path from 'from' to 'to', for instance: -// from = 'C:\\orandea\\test\\aaa' -// to = 'C:\\orandea\\impl\\bbb' -// The output of the function should be: '..\\..\\impl\\bbb' -win32.relative = function(from, to) { - from = win32.resolve(from); - to = win32.resolve(to); - - // windows is not case sensitive - var lowerFrom = from.toLowerCase(); - var lowerTo = to.toLowerCase(); - - var toParts = trimArray(to.split('\\')); - - var lowerFromParts = trimArray(lowerFrom.split('\\')); - var lowerToParts = trimArray(lowerTo.split('\\')); - - var length = Math.min(lowerFromParts.length, lowerToParts.length); - var samePartsLength = length; - for (var i = 0; i < length; i++) { - if (lowerFromParts[i] !== lowerToParts[i]) { - samePartsLength = i; - break; - } - } - - if (samePartsLength == 0) { - return to; - } - - var outputParts = []; - for (var i = samePartsLength; i < lowerFromParts.length; i++) { - outputParts.push('..'); - } - - outputParts = outputParts.concat(toParts.slice(samePartsLength)); - - return outputParts.join('\\'); -}; - - -win32._makeLong = function(path) { - // Note: this will *probably* throw somewhere. - if (!util.isString(path)) - return path; - - if (!path) { - return ''; - } - - var resolvedPath = win32.resolve(path); - - if (/^[a-zA-Z]\:\\/.test(resolvedPath)) { - // path is local filesystem path, which needs to be converted - // to long UNC path. - return '\\\\?\\' + resolvedPath; - } else if (/^\\\\[^?.]/.test(resolvedPath)) { - // path is network UNC path, which needs to be converted - // to long UNC path. - return '\\\\?\\UNC\\' + resolvedPath.substring(2); - } - - return path; -}; - - -win32.dirname = function(path) { - var result = win32SplitPath(path), - root = result[0], - dir = result[1]; - - if (!root && !dir) { - // No dirname whatsoever - return '.'; - } - - if (dir) { - // It has a dirname, strip trailing slash - dir = dir.substr(0, dir.length - 1); - } - - return root + dir; -}; - - -win32.basename = function(path, ext) { - var f = win32SplitPath(path)[2]; - // TODO: make this comparison case-insensitive on windows? 
- if (ext && f.substr(-1 * ext.length) === ext) { - f = f.substr(0, f.length - ext.length); - } - return f; -}; - - -win32.extname = function(path) { - return win32SplitPath(path)[3]; -}; - - -win32.format = function(pathObject) { - if (!util.isObject(pathObject)) { - throw new TypeError( - "Parameter 'pathObject' must be an object, not " + typeof pathObject - ); - } - - var root = pathObject.root || ''; - - if (!util.isString(root)) { - throw new TypeError( - "'pathObject.root' must be a string or undefined, not " + - typeof pathObject.root - ); - } - - var dir = pathObject.dir; - var base = pathObject.base || ''; - if (!dir) { - return base; - } - if (dir[dir.length - 1] === win32.sep) { - return dir + base; - } - return dir + win32.sep + base; -}; - - -win32.parse = function(pathString) { - if (!util.isString(pathString)) { - throw new TypeError( - "Parameter 'pathString' must be a string, not " + typeof pathString - ); - } - var allParts = win32SplitPath(pathString); - if (!allParts || allParts.length !== 4) { - throw new TypeError("Invalid path '" + pathString + "'"); - } - return { - root: allParts[0], - dir: allParts[0] + allParts[1].slice(0, -1), - base: allParts[2], - ext: allParts[3], - name: allParts[2].slice(0, allParts[2].length - allParts[3].length) - }; -}; - - -win32.sep = '\\'; -win32.delimiter = ';'; - - -// Split a filename into [root, dir, basename, ext], unix version -// 'root' is just a slash, or nothing. -var splitPathRe = - /^(\/?|)([\s\S]*?)((?:\.{1,2}|[^\/]+?|)(\.[^.\/]*|))(?:[\/]*)$/; -var posix = {}; - - -function posixSplitPath(filename) { - return splitPathRe.exec(filename).slice(1); -} - - -// path.resolve([from ...], to) -// posix version -posix.resolve = function() { - var resolvedPath = '', - resolvedAbsolute = false; - - for (var i = arguments.length - 1; i >= -1 && !resolvedAbsolute; i--) { - var path = (i >= 0) ? arguments[i] : process.cwd(); - - // Skip empty and invalid entries - if (!util.isString(path)) { - throw new TypeError('Arguments to path.resolve must be strings'); - } else if (!path) { - continue; - } - - resolvedPath = path + '/' + resolvedPath; - resolvedAbsolute = path[0] === '/'; - } - - // At this point the path should be resolved to a full absolute path, but - // handle relative paths to be safe (might happen when process.cwd() fails) - - // Normalize the path - resolvedPath = normalizeArray(resolvedPath.split('/'), - !resolvedAbsolute).join('/'); - - return ((resolvedAbsolute ? '/' : '') + resolvedPath) || '.'; -}; - -// path.normalize(path) -// posix version -posix.normalize = function(path) { - var isAbsolute = posix.isAbsolute(path), - trailingSlash = path && path[path.length - 1] === '/'; - - // Normalize the path - path = normalizeArray(path.split('/'), !isAbsolute).join('/'); - - if (!path && !isAbsolute) { - path = '.'; - } - if (path && trailingSlash) { - path += '/'; - } - - return (isAbsolute ? 
'/' : '') + path; -}; - -// posix version -posix.isAbsolute = function(path) { - return path.charAt(0) === '/'; -}; - -// posix version -posix.join = function() { - var path = ''; - for (var i = 0; i < arguments.length; i++) { - var segment = arguments[i]; - if (!util.isString(segment)) { - throw new TypeError('Arguments to path.join must be strings'); - } - if (segment) { - if (!path) { - path += segment; - } else { - path += '/' + segment; - } - } - } - return posix.normalize(path); -}; - - -// path.relative(from, to) -// posix version -posix.relative = function(from, to) { - from = posix.resolve(from).substr(1); - to = posix.resolve(to).substr(1); - - var fromParts = trimArray(from.split('/')); - var toParts = trimArray(to.split('/')); - - var length = Math.min(fromParts.length, toParts.length); - var samePartsLength = length; - for (var i = 0; i < length; i++) { - if (fromParts[i] !== toParts[i]) { - samePartsLength = i; - break; - } - } - - var outputParts = []; - for (var i = samePartsLength; i < fromParts.length; i++) { - outputParts.push('..'); - } - - outputParts = outputParts.concat(toParts.slice(samePartsLength)); - - return outputParts.join('/'); -}; - - -posix._makeLong = function(path) { - return path; -}; - - -posix.dirname = function(path) { - var result = posixSplitPath(path), - root = result[0], - dir = result[1]; - - if (!root && !dir) { - // No dirname whatsoever - return '.'; - } - - if (dir) { - // It has a dirname, strip trailing slash - dir = dir.substr(0, dir.length - 1); - } - - return root + dir; -}; - - -posix.basename = function(path, ext) { - var f = posixSplitPath(path)[2]; - // TODO: make this comparison case-insensitive on windows? - if (ext && f.substr(-1 * ext.length) === ext) { - f = f.substr(0, f.length - ext.length); - } - return f; -}; - - -posix.extname = function(path) { - return posixSplitPath(path)[3]; -}; - - -posix.format = function(pathObject) { - if (!util.isObject(pathObject)) { - throw new TypeError( - "Parameter 'pathObject' must be an object, not " + typeof pathObject - ); - } - - var root = pathObject.root || ''; - - if (!util.isString(root)) { - throw new TypeError( - "'pathObject.root' must be a string or undefined, not " + - typeof pathObject.root - ); - } - - var dir = pathObject.dir ? 
pathObject.dir + posix.sep : ''; - var base = pathObject.base || ''; - return dir + base; -}; - - -posix.parse = function(pathString) { - if (!util.isString(pathString)) { - throw new TypeError( - "Parameter 'pathString' must be a string, not " + typeof pathString - ); - } - var allParts = posixSplitPath(pathString); - if (!allParts || allParts.length !== 4) { - throw new TypeError("Invalid path '" + pathString + "'"); - } - allParts[1] = allParts[1] || ''; - allParts[2] = allParts[2] || ''; - allParts[3] = allParts[3] || ''; - - return { - root: allParts[0], - dir: allParts[0] + allParts[1].slice(0, -1), - base: allParts[2], - ext: allParts[3], - name: allParts[2].slice(0, allParts[2].length - allParts[3].length) - }; -}; - - -posix.sep = '/'; -posix.delimiter = ':'; - - -if (isWindows) - module.exports = win32; -else /* posix */ - module.exports = posix; - -module.exports.posix = posix; -module.exports.win32 = win32; diff --git a/bin/node_modules/process/.eslintrc b/bin/node_modules/process/.eslintrc deleted file mode 100644 index 1e7aab7..0000000 --- a/bin/node_modules/process/.eslintrc +++ /dev/null @@ -1,21 +0,0 @@ -{ -extends: "eslint:recommended", - "env": { - "node": true, - "browser": true, - "es6" : true, - "mocha": true - }, - "rules": { - "indent": [2, 4], - "brace-style": [2, "1tbs"], - "quotes": [2, "single"], - "no-console": 0, - "no-shadow": 0, - "no-use-before-define": [2, "nofunc"], - "no-underscore-dangle": 0, - "no-constant-condition": 0, - "space-after-function-name": 0, - "consistent-return": 0 - } -} diff --git a/bin/node_modules/process/LICENSE b/bin/node_modules/process/LICENSE deleted file mode 100644 index b8c1246..0000000 --- a/bin/node_modules/process/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -(The MIT License) - -Copyright (c) 2013 Roman Shtylman - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -'Software'), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/bin/node_modules/process/README.md b/bin/node_modules/process/README.md deleted file mode 100644 index 6570729..0000000 --- a/bin/node_modules/process/README.md +++ /dev/null @@ -1,26 +0,0 @@ -# process - -```require('process');``` just like any other module. - -Works in node.js and browsers via the browser.js shim provided with the module. - -## browser implementation - -The goal of this module is not to be a full-fledged alternative to the builtin process module. This module mostly exists to provide the nextTick functionality and little more. 
We keep this module lean because it will often be included by default by tools like browserify when it detects a module has used the `process` global. - -It also exposes a "browser" member (i.e. `process.browser`) which is `true` in this implementation but `undefined` in node. This can be used in isomorphic code that adjusts it's behavior depending on which environment it's running in. - -If you are looking to provide other process methods, I suggest you monkey patch them onto the process global in your app. A list of user created patches is below. - -* [hrtime](https://github.com/kumavis/browser-process-hrtime) -* [stdout](https://github.com/kumavis/browser-stdout) - -## package manager notes - -If you are writing a bundler to package modules for client side use, make sure you use the ```browser``` field hint in package.json. - -See https://gist.github.com/4339901 for details. - -The [browserify](https://github.com/substack/node-browserify) module will properly handle this field when bundling your files. - - diff --git a/bin/node_modules/process/browser.js b/bin/node_modules/process/browser.js deleted file mode 100644 index d059362..0000000 --- a/bin/node_modules/process/browser.js +++ /dev/null @@ -1,184 +0,0 @@ -// shim for using process in browser -var process = module.exports = {}; - -// cached from whatever global is present so that test runners that stub it -// don't break things. But we need to wrap it in a try catch in case it is -// wrapped in strict mode code which doesn't define any globals. It's inside a -// function because try/catches deoptimize in certain engines. - -var cachedSetTimeout; -var cachedClearTimeout; - -function defaultSetTimout() { - throw new Error('setTimeout has not been defined'); -} -function defaultClearTimeout () { - throw new Error('clearTimeout has not been defined'); -} -(function () { - try { - if (typeof setTimeout === 'function') { - cachedSetTimeout = setTimeout; - } else { - cachedSetTimeout = defaultSetTimout; - } - } catch (e) { - cachedSetTimeout = defaultSetTimout; - } - try { - if (typeof clearTimeout === 'function') { - cachedClearTimeout = clearTimeout; - } else { - cachedClearTimeout = defaultClearTimeout; - } - } catch (e) { - cachedClearTimeout = defaultClearTimeout; - } -} ()) -function runTimeout(fun) { - if (cachedSetTimeout === setTimeout) { - //normal enviroments in sane situations - return setTimeout(fun, 0); - } - // if setTimeout wasn't available but was latter defined - if ((cachedSetTimeout === defaultSetTimout || !cachedSetTimeout) && setTimeout) { - cachedSetTimeout = setTimeout; - return setTimeout(fun, 0); - } - try { - // when when somebody has screwed with setTimeout but no I.E. maddness - return cachedSetTimeout(fun, 0); - } catch(e){ - try { - // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally - return cachedSetTimeout.call(null, fun, 0); - } catch(e){ - // same as above but when it's a version of I.E. 
that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error - return cachedSetTimeout.call(this, fun, 0); - } - } - - -} -function runClearTimeout(marker) { - if (cachedClearTimeout === clearTimeout) { - //normal enviroments in sane situations - return clearTimeout(marker); - } - // if clearTimeout wasn't available but was latter defined - if ((cachedClearTimeout === defaultClearTimeout || !cachedClearTimeout) && clearTimeout) { - cachedClearTimeout = clearTimeout; - return clearTimeout(marker); - } - try { - // when when somebody has screwed with setTimeout but no I.E. maddness - return cachedClearTimeout(marker); - } catch (e){ - try { - // When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally - return cachedClearTimeout.call(null, marker); - } catch (e){ - // same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error. - // Some versions of I.E. have different rules for clearTimeout vs setTimeout - return cachedClearTimeout.call(this, marker); - } - } - - - -} -var queue = []; -var draining = false; -var currentQueue; -var queueIndex = -1; - -function cleanUpNextTick() { - if (!draining || !currentQueue) { - return; - } - draining = false; - if (currentQueue.length) { - queue = currentQueue.concat(queue); - } else { - queueIndex = -1; - } - if (queue.length) { - drainQueue(); - } -} - -function drainQueue() { - if (draining) { - return; - } - var timeout = runTimeout(cleanUpNextTick); - draining = true; - - var len = queue.length; - while(len) { - currentQueue = queue; - queue = []; - while (++queueIndex < len) { - if (currentQueue) { - currentQueue[queueIndex].run(); - } - } - queueIndex = -1; - len = queue.length; - } - currentQueue = null; - draining = false; - runClearTimeout(timeout); -} - -process.nextTick = function (fun) { - var args = new Array(arguments.length - 1); - if (arguments.length > 1) { - for (var i = 1; i < arguments.length; i++) { - args[i - 1] = arguments[i]; - } - } - queue.push(new Item(fun, args)); - if (queue.length === 1 && !draining) { - runTimeout(drainQueue); - } -}; - -// v8 likes predictible objects -function Item(fun, array) { - this.fun = fun; - this.array = array; -} -Item.prototype.run = function () { - this.fun.apply(null, this.array); -}; -process.title = 'browser'; -process.browser = true; -process.env = {}; -process.argv = []; -process.version = ''; // empty string to avoid regexp issues -process.versions = {}; - -function noop() {} - -process.on = noop; -process.addListener = noop; -process.once = noop; -process.off = noop; -process.removeListener = noop; -process.removeAllListeners = noop; -process.emit = noop; -process.prependListener = noop; -process.prependOnceListener = noop; - -process.listeners = function (name) { return [] } - -process.binding = function (name) { - throw new Error('process.binding is not supported'); -}; - -process.cwd = function () { return '/' }; -process.chdir = function (dir) { - throw new Error('process.chdir is not supported'); -}; -process.umask = function() { return 0; }; diff --git a/bin/node_modules/process/index.js b/bin/node_modules/process/index.js deleted file mode 100644 index 8d8ed7d..0000000 --- a/bin/node_modules/process/index.js +++ /dev/null @@ -1,2 +0,0 @@ -// for now just expose the builtin process global from node.js -module.exports = global.process; diff --git 
a/bin/node_modules/process/package.json b/bin/node_modules/process/package.json deleted file mode 100644 index d2cfaad..0000000 --- a/bin/node_modules/process/package.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "author": "Roman Shtylman ", - "name": "process", - "description": "process information for node.js and browsers", - "keywords": [ - "process" - ], - "scripts": { - "test": "mocha test.js", - "browser": "zuul --no-coverage --ui mocha-bdd --local 8080 -- test.js" - }, - "version": "0.11.10", - "repository": { - "type": "git", - "url": "git://github.com/shtylman/node-process.git" - }, - "license": "MIT", - "browser": "./browser.js", - "main": "./index.js", - "engines": { - "node": ">= 0.6.0" - }, - "devDependencies": { - "mocha": "2.2.1", - "zuul": "^3.10.3" - } -} diff --git a/bin/node_modules/process/test.js b/bin/node_modules/process/test.js deleted file mode 100644 index 8ba579c..0000000 --- a/bin/node_modules/process/test.js +++ /dev/null @@ -1,199 +0,0 @@ -var assert = require('assert'); -var ourProcess = require('./browser'); -describe('test against our process', function () { - test(ourProcess); -}); -if (!process.browser) { - describe('test against node', function () { - test(process); - }); - vmtest(); -} -function test (ourProcess) { - describe('test arguments', function () { - it ('works', function (done) { - var order = 0; - - - ourProcess.nextTick(function (num) { - assert.equal(num, order++, 'first one works'); - ourProcess.nextTick(function (num) { - assert.equal(num, order++, 'recursive one is 4th'); - }, 3); - }, 0); - ourProcess.nextTick(function (num) { - assert.equal(num, order++, 'second one starts'); - ourProcess.nextTick(function (num) { - assert.equal(num, order++, 'this is third'); - ourProcess.nextTick(function (num) { - assert.equal(num, order++, 'this is last'); - done(); - }, 5); - }, 4); - }, 1); - ourProcess.nextTick(function (num) { - - assert.equal(num, order++, '3rd schedualed happens after the error'); - }, 2); - }); - }); -if (!process.browser) { - describe('test errors', function (t) { - it ('works', function (done) { - var order = 0; - process.removeAllListeners('uncaughtException'); - process.once('uncaughtException', function(err) { - assert.equal(2, order++, 'error is third'); - ourProcess.nextTick(function () { - assert.equal(5, order++, 'schedualed in error is last'); - done(); - }); - }); - ourProcess.nextTick(function () { - assert.equal(0, order++, 'first one works'); - ourProcess.nextTick(function () { - assert.equal(4, order++, 'recursive one is 4th'); - }); - }); - ourProcess.nextTick(function () { - assert.equal(1, order++, 'second one starts'); - throw(new Error('an error is thrown')); - }); - ourProcess.nextTick(function () { - assert.equal(3, order++, '3rd schedualed happens after the error'); - }); - }); - }); -} - describe('rename globals', function (t) { - var oldTimeout = setTimeout; - var oldClear = clearTimeout; - - it('clearTimeout', function (done){ - - var ok = true; - clearTimeout = function () { - ok = false; - } - var ran = false; - function cleanup() { - clearTimeout = oldClear; - var err; - try { - assert.ok(ok, 'fake clearTimeout ran'); - assert.ok(ran, 'should have run'); - } catch (e) { - err = e; - } - done(err); - } - setTimeout(cleanup, 1000); - ourProcess.nextTick(function () { - ran = true; - }); - }); - it('just setTimeout', function (done){ - - - setTimeout = function () { - setTimeout = oldTimeout; - try { - assert.ok(false, 'fake setTimeout called') - } catch (e) { - done(e); - } - - } - - 
ourProcess.nextTick(function () { - setTimeout = oldTimeout; - done(); - }); - }); - }); -} -function vmtest() { - var vm = require('vm'); - var fs = require('fs'); - var process = fs.readFileSync('./browser.js', {encoding: 'utf8'}); - - - describe('should work in vm in strict mode with no globals', function () { - it('should parse', function (done) { - var str = '"use strict";var module = {exports:{}};'; - str += process; - str += 'this.works = process.browser;'; - var script = new vm.Script(str); - var context = { - works: false - }; - script.runInNewContext(context); - assert.ok(context.works); - done(); - }); - it('setTimeout throws error', function (done) { - var str = '"use strict";var module = {exports:{}};'; - str += process; - str += 'try {process.nextTick(function () {})} catch (e){this.works = e;}'; - var script = new vm.Script(str); - var context = { - works: false - }; - script.runInNewContext(context); - assert.ok(context.works); - done(); - }); - it('should generally work', function (done) { - var str = '"use strict";var module = {exports:{}};'; - str += process; - str += 'process.nextTick(function () {assert.ok(true);done();})'; - var script = new vm.Script(str); - var context = { - clearTimeout: clearTimeout, - setTimeout: setTimeout, - done: done, - assert: assert - }; - script.runInNewContext(context); - }); - it('late defs setTimeout', function (done) { - var str = '"use strict";var module = {exports:{}};'; - str += process; - str += 'var setTimeout = hiddenSetTimeout;process.nextTick(function () {assert.ok(true);done();})'; - var script = new vm.Script(str); - var context = { - clearTimeout: clearTimeout, - hiddenSetTimeout: setTimeout, - done: done, - assert: assert - }; - script.runInNewContext(context); - }); - it('late defs clearTimeout', function (done) { - var str = '"use strict";var module = {exports:{}};'; - str += process; - str += 'var clearTimeout = hiddenClearTimeout;process.nextTick(function () {assert.ok(true);done();})'; - var script = new vm.Script(str); - var context = { - hiddenClearTimeout: clearTimeout, - setTimeout: setTimeout, - done: done, - assert: assert - }; - script.runInNewContext(context); - }); - it('late defs setTimeout and then redefine', function (done) { - var str = '"use strict";var module = {exports:{}};'; - str += process; - str += 'var setTimeout = hiddenSetTimeout;process.nextTick(function () {setTimeout = function (){throw new Error("foo")};hiddenSetTimeout(function(){process.nextTick(function (){assert.ok(true);done();});});});'; - var script = new vm.Script(str); - var context = { - clearTimeout: clearTimeout, - hiddenSetTimeout: setTimeout, - done: done, - assert: assert - }; - script.runInNewContext(context); - }); - }); -} diff --git a/bin/node_modules/util/LICENSE b/bin/node_modules/util/LICENSE deleted file mode 100644 index e3d4e69..0000000 --- a/bin/node_modules/util/LICENSE +++ /dev/null @@ -1,18 +0,0 @@ -Copyright Joyent, Inc. and other Node contributors. All rights reserved. 
-Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to -deal in the Software without restriction, including without limitation the -rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS -IN THE SOFTWARE. diff --git a/bin/node_modules/util/README.md b/bin/node_modules/util/README.md deleted file mode 100644 index 1c473d2..0000000 --- a/bin/node_modules/util/README.md +++ /dev/null @@ -1,15 +0,0 @@ -# util - -[![Build Status](https://travis-ci.org/defunctzombie/node-util.png?branch=master)](https://travis-ci.org/defunctzombie/node-util) - -node.js [util](http://nodejs.org/api/util.html) module as a module - -## install via [npm](npmjs.org) - -```shell -npm install util -``` - -## browser support - -This module also works in modern browsers. If you need legacy browser support you will need to polyfill ES5 features. diff --git a/bin/node_modules/util/package.json b/bin/node_modules/util/package.json deleted file mode 100644 index 13d19a0..0000000 --- a/bin/node_modules/util/package.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "author": { - "name": "Joyent", - "url": "http://www.joyent.com" - }, - "name": "util", - "description": "Node.JS util module", - "keywords": [ - "util" - ], - "version": "0.10.4", - "homepage": "https://github.com/defunctzombie/node-util", - "repository": { - "type": "git", - "url": "git://github.com/defunctzombie/node-util" - }, - "main": "./util.js", - "files": [ - "util.js", - "support" - ], - "scripts": { - "test": "node test/node/*.js && zuul test/browser/*.js" - }, - "dependencies": { - "inherits": "2.0.3" - }, - "license": "MIT", - "devDependencies": { - "zuul": "~1.0.9" - }, - "browser": { - "./support/isBuffer.js": "./support/isBufferBrowser.js" - } -} diff --git a/bin/node_modules/util/support/isBuffer.js b/bin/node_modules/util/support/isBuffer.js deleted file mode 100644 index ace9ac0..0000000 --- a/bin/node_modules/util/support/isBuffer.js +++ /dev/null @@ -1,3 +0,0 @@ -module.exports = function isBuffer(arg) { - return arg instanceof Buffer; -} diff --git a/bin/node_modules/util/support/isBufferBrowser.js b/bin/node_modules/util/support/isBufferBrowser.js deleted file mode 100644 index 0e1bee1..0000000 --- a/bin/node_modules/util/support/isBufferBrowser.js +++ /dev/null @@ -1,6 +0,0 @@ -module.exports = function isBuffer(arg) { - return arg && typeof arg === 'object' - && typeof arg.copy === 'function' - && typeof arg.fill === 'function' - && typeof arg.readUInt8 === 'function'; -} \ No newline at end of file diff --git a/bin/node_modules/util/util.js b/bin/node_modules/util/util.js deleted file mode 100644 index e0ea321..0000000 --- a/bin/node_modules/util/util.js +++ /dev/null @@ -1,586 +0,0 @@ -// Copyright Joyent, Inc. 
and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -var formatRegExp = /%[sdj%]/g; -exports.format = function(f) { - if (!isString(f)) { - var objects = []; - for (var i = 0; i < arguments.length; i++) { - objects.push(inspect(arguments[i])); - } - return objects.join(' '); - } - - var i = 1; - var args = arguments; - var len = args.length; - var str = String(f).replace(formatRegExp, function(x) { - if (x === '%%') return '%'; - if (i >= len) return x; - switch (x) { - case '%s': return String(args[i++]); - case '%d': return Number(args[i++]); - case '%j': - try { - return JSON.stringify(args[i++]); - } catch (_) { - return '[Circular]'; - } - default: - return x; - } - }); - for (var x = args[i]; i < len; x = args[++i]) { - if (isNull(x) || !isObject(x)) { - str += ' ' + x; - } else { - str += ' ' + inspect(x); - } - } - return str; -}; - - -// Mark that a method should not be used. -// Returns a modified function which warns once by default. -// If --no-deprecation is set, then it is a no-op. -exports.deprecate = function(fn, msg) { - // Allow for deprecating things in the process of starting up. - if (isUndefined(global.process)) { - return function() { - return exports.deprecate(fn, msg).apply(this, arguments); - }; - } - - if (process.noDeprecation === true) { - return fn; - } - - var warned = false; - function deprecated() { - if (!warned) { - if (process.throwDeprecation) { - throw new Error(msg); - } else if (process.traceDeprecation) { - console.trace(msg); - } else { - console.error(msg); - } - warned = true; - } - return fn.apply(this, arguments); - } - - return deprecated; -}; - - -var debugs = {}; -var debugEnviron; -exports.debuglog = function(set) { - if (isUndefined(debugEnviron)) - debugEnviron = process.env.NODE_DEBUG || ''; - set = set.toUpperCase(); - if (!debugs[set]) { - if (new RegExp('\\b' + set + '\\b', 'i').test(debugEnviron)) { - var pid = process.pid; - debugs[set] = function() { - var msg = exports.format.apply(exports, arguments); - console.error('%s %d: %s', set, pid, msg); - }; - } else { - debugs[set] = function() {}; - } - } - return debugs[set]; -}; - - -/** - * Echos the value of a value. Trys to print the value out - * in the best way possible given the different types. - * - * @param {Object} obj The object to print out. - * @param {Object} opts Optional options object that alters the output. 
- */ -/* legacy: obj, showHidden, depth, colors*/ -function inspect(obj, opts) { - // default options - var ctx = { - seen: [], - stylize: stylizeNoColor - }; - // legacy... - if (arguments.length >= 3) ctx.depth = arguments[2]; - if (arguments.length >= 4) ctx.colors = arguments[3]; - if (isBoolean(opts)) { - // legacy... - ctx.showHidden = opts; - } else if (opts) { - // got an "options" object - exports._extend(ctx, opts); - } - // set default options - if (isUndefined(ctx.showHidden)) ctx.showHidden = false; - if (isUndefined(ctx.depth)) ctx.depth = 2; - if (isUndefined(ctx.colors)) ctx.colors = false; - if (isUndefined(ctx.customInspect)) ctx.customInspect = true; - if (ctx.colors) ctx.stylize = stylizeWithColor; - return formatValue(ctx, obj, ctx.depth); -} -exports.inspect = inspect; - - -// http://en.wikipedia.org/wiki/ANSI_escape_code#graphics -inspect.colors = { - 'bold' : [1, 22], - 'italic' : [3, 23], - 'underline' : [4, 24], - 'inverse' : [7, 27], - 'white' : [37, 39], - 'grey' : [90, 39], - 'black' : [30, 39], - 'blue' : [34, 39], - 'cyan' : [36, 39], - 'green' : [32, 39], - 'magenta' : [35, 39], - 'red' : [31, 39], - 'yellow' : [33, 39] -}; - -// Don't use 'blue' not visible on cmd.exe -inspect.styles = { - 'special': 'cyan', - 'number': 'yellow', - 'boolean': 'yellow', - 'undefined': 'grey', - 'null': 'bold', - 'string': 'green', - 'date': 'magenta', - // "name": intentionally not styling - 'regexp': 'red' -}; - - -function stylizeWithColor(str, styleType) { - var style = inspect.styles[styleType]; - - if (style) { - return '\u001b[' + inspect.colors[style][0] + 'm' + str + - '\u001b[' + inspect.colors[style][1] + 'm'; - } else { - return str; - } -} - - -function stylizeNoColor(str, styleType) { - return str; -} - - -function arrayToHash(array) { - var hash = {}; - - array.forEach(function(val, idx) { - hash[val] = true; - }); - - return hash; -} - - -function formatValue(ctx, value, recurseTimes) { - // Provide a hook for user-specified inspect functions. - // Check that value is an object with an inspect function on it - if (ctx.customInspect && - value && - isFunction(value.inspect) && - // Filter out the util module, it's inspect function is special - value.inspect !== exports.inspect && - // Also filter out any prototype objects using the circular check. - !(value.constructor && value.constructor.prototype === value)) { - var ret = value.inspect(recurseTimes, ctx); - if (!isString(ret)) { - ret = formatValue(ctx, ret, recurseTimes); - } - return ret; - } - - // Primitive types cannot have properties - var primitive = formatPrimitive(ctx, value); - if (primitive) { - return primitive; - } - - // Look up the keys of the object. - var keys = Object.keys(value); - var visibleKeys = arrayToHash(keys); - - if (ctx.showHidden) { - keys = Object.getOwnPropertyNames(value); - } - - // IE doesn't make error fields non-enumerable - // http://msdn.microsoft.com/en-us/library/ie/dww52sbt(v=vs.94).aspx - if (isError(value) - && (keys.indexOf('message') >= 0 || keys.indexOf('description') >= 0)) { - return formatError(value); - } - - // Some type of object without properties can be shortcutted. - if (keys.length === 0) { - if (isFunction(value)) { - var name = value.name ? 
': ' + value.name : ''; - return ctx.stylize('[Function' + name + ']', 'special'); - } - if (isRegExp(value)) { - return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp'); - } - if (isDate(value)) { - return ctx.stylize(Date.prototype.toString.call(value), 'date'); - } - if (isError(value)) { - return formatError(value); - } - } - - var base = '', array = false, braces = ['{', '}']; - - // Make Array say that they are Array - if (isArray(value)) { - array = true; - braces = ['[', ']']; - } - - // Make functions say that they are functions - if (isFunction(value)) { - var n = value.name ? ': ' + value.name : ''; - base = ' [Function' + n + ']'; - } - - // Make RegExps say that they are RegExps - if (isRegExp(value)) { - base = ' ' + RegExp.prototype.toString.call(value); - } - - // Make dates with properties first say the date - if (isDate(value)) { - base = ' ' + Date.prototype.toUTCString.call(value); - } - - // Make error with message first say the error - if (isError(value)) { - base = ' ' + formatError(value); - } - - if (keys.length === 0 && (!array || value.length == 0)) { - return braces[0] + base + braces[1]; - } - - if (recurseTimes < 0) { - if (isRegExp(value)) { - return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp'); - } else { - return ctx.stylize('[Object]', 'special'); - } - } - - ctx.seen.push(value); - - var output; - if (array) { - output = formatArray(ctx, value, recurseTimes, visibleKeys, keys); - } else { - output = keys.map(function(key) { - return formatProperty(ctx, value, recurseTimes, visibleKeys, key, array); - }); - } - - ctx.seen.pop(); - - return reduceToSingleString(output, base, braces); -} - - -function formatPrimitive(ctx, value) { - if (isUndefined(value)) - return ctx.stylize('undefined', 'undefined'); - if (isString(value)) { - var simple = '\'' + JSON.stringify(value).replace(/^"|"$/g, '') - .replace(/'/g, "\\'") - .replace(/\\"/g, '"') + '\''; - return ctx.stylize(simple, 'string'); - } - if (isNumber(value)) - return ctx.stylize('' + value, 'number'); - if (isBoolean(value)) - return ctx.stylize('' + value, 'boolean'); - // For some reason typeof null is "object", so special case here. 
- if (isNull(value)) - return ctx.stylize('null', 'null'); -} - - -function formatError(value) { - return '[' + Error.prototype.toString.call(value) + ']'; -} - - -function formatArray(ctx, value, recurseTimes, visibleKeys, keys) { - var output = []; - for (var i = 0, l = value.length; i < l; ++i) { - if (hasOwnProperty(value, String(i))) { - output.push(formatProperty(ctx, value, recurseTimes, visibleKeys, - String(i), true)); - } else { - output.push(''); - } - } - keys.forEach(function(key) { - if (!key.match(/^\d+$/)) { - output.push(formatProperty(ctx, value, recurseTimes, visibleKeys, - key, true)); - } - }); - return output; -} - - -function formatProperty(ctx, value, recurseTimes, visibleKeys, key, array) { - var name, str, desc; - desc = Object.getOwnPropertyDescriptor(value, key) || { value: value[key] }; - if (desc.get) { - if (desc.set) { - str = ctx.stylize('[Getter/Setter]', 'special'); - } else { - str = ctx.stylize('[Getter]', 'special'); - } - } else { - if (desc.set) { - str = ctx.stylize('[Setter]', 'special'); - } - } - if (!hasOwnProperty(visibleKeys, key)) { - name = '[' + key + ']'; - } - if (!str) { - if (ctx.seen.indexOf(desc.value) < 0) { - if (isNull(recurseTimes)) { - str = formatValue(ctx, desc.value, null); - } else { - str = formatValue(ctx, desc.value, recurseTimes - 1); - } - if (str.indexOf('\n') > -1) { - if (array) { - str = str.split('\n').map(function(line) { - return ' ' + line; - }).join('\n').substr(2); - } else { - str = '\n' + str.split('\n').map(function(line) { - return ' ' + line; - }).join('\n'); - } - } - } else { - str = ctx.stylize('[Circular]', 'special'); - } - } - if (isUndefined(name)) { - if (array && key.match(/^\d+$/)) { - return str; - } - name = JSON.stringify('' + key); - if (name.match(/^"([a-zA-Z_][a-zA-Z_0-9]*)"$/)) { - name = name.substr(1, name.length - 2); - name = ctx.stylize(name, 'name'); - } else { - name = name.replace(/'/g, "\\'") - .replace(/\\"/g, '"') - .replace(/(^"|"$)/g, "'"); - name = ctx.stylize(name, 'string'); - } - } - - return name + ': ' + str; -} - - -function reduceToSingleString(output, base, braces) { - var numLinesEst = 0; - var length = output.reduce(function(prev, cur) { - numLinesEst++; - if (cur.indexOf('\n') >= 0) numLinesEst++; - return prev + cur.replace(/\u001b\[\d\d?m/g, '').length + 1; - }, 0); - - if (length > 60) { - return braces[0] + - (base === '' ? '' : base + '\n ') + - ' ' + - output.join(',\n ') + - ' ' + - braces[1]; - } - - return braces[0] + base + ' ' + output.join(', ') + ' ' + braces[1]; -} - - -// NOTE: These type checking functions intentionally don't use `instanceof` -// because it is fragile and can be easily faked with `Object.create()`. 
-function isArray(ar) { - return Array.isArray(ar); -} -exports.isArray = isArray; - -function isBoolean(arg) { - return typeof arg === 'boolean'; -} -exports.isBoolean = isBoolean; - -function isNull(arg) { - return arg === null; -} -exports.isNull = isNull; - -function isNullOrUndefined(arg) { - return arg == null; -} -exports.isNullOrUndefined = isNullOrUndefined; - -function isNumber(arg) { - return typeof arg === 'number'; -} -exports.isNumber = isNumber; - -function isString(arg) { - return typeof arg === 'string'; -} -exports.isString = isString; - -function isSymbol(arg) { - return typeof arg === 'symbol'; -} -exports.isSymbol = isSymbol; - -function isUndefined(arg) { - return arg === void 0; -} -exports.isUndefined = isUndefined; - -function isRegExp(re) { - return isObject(re) && objectToString(re) === '[object RegExp]'; -} -exports.isRegExp = isRegExp; - -function isObject(arg) { - return typeof arg === 'object' && arg !== null; -} -exports.isObject = isObject; - -function isDate(d) { - return isObject(d) && objectToString(d) === '[object Date]'; -} -exports.isDate = isDate; - -function isError(e) { - return isObject(e) && - (objectToString(e) === '[object Error]' || e instanceof Error); -} -exports.isError = isError; - -function isFunction(arg) { - return typeof arg === 'function'; -} -exports.isFunction = isFunction; - -function isPrimitive(arg) { - return arg === null || - typeof arg === 'boolean' || - typeof arg === 'number' || - typeof arg === 'string' || - typeof arg === 'symbol' || // ES6 symbol - typeof arg === 'undefined'; -} -exports.isPrimitive = isPrimitive; - -exports.isBuffer = require('./support/isBuffer'); - -function objectToString(o) { - return Object.prototype.toString.call(o); -} - - -function pad(n) { - return n < 10 ? '0' + n.toString(10) : n.toString(10); -} - - -var months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', - 'Oct', 'Nov', 'Dec']; - -// 26 Feb 16:19:34 -function timestamp() { - var d = new Date(); - var time = [pad(d.getHours()), - pad(d.getMinutes()), - pad(d.getSeconds())].join(':'); - return [d.getDate(), months[d.getMonth()], time].join(' '); -} - - -// log is just a thin wrapper to console.log that prepends a timestamp -exports.log = function() { - console.log('%s - %s', timestamp(), exports.format.apply(exports, arguments)); -}; - - -/** - * Inherit the prototype methods from one constructor into another. - * - * The Function.prototype.inherits from lang.js rewritten as a standalone - * function (not on Function.prototype). NOTE: If this file is to be loaded - * during bootstrapping this function needs to be rewritten using some native - * functions as prototype setup using normal JavaScript does not work as - * expected during bootstrapping (see mirror.js in r114903). - * - * @param {function} ctor Constructor function which needs to inherit the - * prototype. - * @param {function} superCtor Constructor function to inherit prototype from. 
- */ -exports.inherits = require('inherits'); - -exports._extend = function(origin, add) { - // Don't do anything if add isn't an object - if (!add || !isObject(add)) return origin; - - var keys = Object.keys(add); - var i = keys.length; - while (i--) { - origin[keys[i]] = add[keys[i]]; - } - return origin; -}; - -function hasOwnProperty(obj, prop) { - return Object.prototype.hasOwnProperty.call(obj, prop); -} diff --git a/bin/node_modules/yaml/LICENSE b/bin/node_modules/yaml/LICENSE deleted file mode 100644 index e060aaa..0000000 --- a/bin/node_modules/yaml/LICENSE +++ /dev/null @@ -1,13 +0,0 @@ -Copyright Eemeli Aro - -Permission to use, copy, modify, and/or distribute this software for any purpose -with or without fee is hereby granted, provided that the above copyright notice -and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF -THIS SOFTWARE. diff --git a/bin/node_modules/yaml/README.md b/bin/node_modules/yaml/README.md deleted file mode 100644 index 25c273d..0000000 --- a/bin/node_modules/yaml/README.md +++ /dev/null @@ -1,155 +0,0 @@ -# YAML - -`yaml` is a definitive library for [YAML](https://yaml.org/), the human friendly data serialization standard. -This library: - -- Supports both YAML 1.1 and YAML 1.2 and all common data schemas, -- Passes all of the [yaml-test-suite](https://github.com/yaml/yaml-test-suite) tests, -- Can accept any string as input without throwing, parsing as much YAML out of it as it can, and -- Supports parsing, modifying, and writing YAML comments and blank lines. - -The library is released under the ISC open source license, and the code is [available on GitHub](https://github.com/eemeli/yaml/). -It has no external dependencies and runs on Node.js as well as modern browsers. - -For the purposes of versioning, any changes that break any of the documented endpoints or APIs will be considered semver-major breaking changes. -Undocumented library internals may change between minor versions, and previous APIs may be deprecated (but not removed). - -The minimum supported TypeScript version of the included typings is 3.9; -for use in earlier versions you may need to set `skipLibCheck: true` in your config. -This requirement may be updated between minor versions of the library. - -For more information, see the project's documentation site: [**eemeli.org/yaml**](https://eemeli.org/yaml/) - -To install: - -```sh -npm install yaml -``` - -**Note:** These docs are for `yaml@2`. For v1, see the [v1.10.0 tag](https://github.com/eemeli/yaml/tree/v1.10.0) for the source and [eemeli.org/yaml/v1](https://eemeli.org/yaml/v1/) for the documentation. - -The development and maintenance of this library is [sponsored](https://github.com/sponsors/eemeli) by: - - -Scipress - - -## API Overview - -The API provided by `yaml` has three layers, depending on how deep you need to go: [Parse & Stringify](https://eemeli.org/yaml/#parse-amp-stringify), [Documents](https://eemeli.org/yaml/#documents), and the underlying [Lexer/Parser/Composer](https://eemeli.org/yaml/#parsing-yaml). 
-The first has the simplest API and "just works", the second gets you all the bells and whistles supported by the library along with a decent [AST](https://eemeli.org/yaml/#content-nodes), and the third lets you get progressively closer to YAML source, if that's your thing. - -A [command-line tool](https://eemeli.org/yaml/#command-line-tool) is also included. - -```js -import { parse, stringify } from 'yaml' -// or -import YAML from 'yaml' -// or -const YAML = require('yaml') -``` - -### Parse & Stringify - -- [`parse(str, reviver?, options?): value`](https://eemeli.org/yaml/#yaml-parse) -- [`stringify(value, replacer?, options?): string`](https://eemeli.org/yaml/#yaml-stringify) - -### Documents - -- [`Document`](https://eemeli.org/yaml/#documents) - - [`constructor(value, replacer?, options?)`](https://eemeli.org/yaml/#creating-documents) - - [`#anchors`](https://eemeli.org/yaml/#working-with-anchors) - - [`#contents`](https://eemeli.org/yaml/#content-nodes) - - [`#directives`](https://eemeli.org/yaml/#stream-directives) - - [`#errors`](https://eemeli.org/yaml/#errors) - - [`#warnings`](https://eemeli.org/yaml/#errors) -- [`isDocument(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types) -- [`parseAllDocuments(str, options?): Document[]`](https://eemeli.org/yaml/#parsing-documents) -- [`parseDocument(str, options?): Document`](https://eemeli.org/yaml/#parsing-documents) - -### Content Nodes - -- [`isAlias(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types) -- [`isCollection(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types) -- [`isMap(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types) -- [`isNode(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types) -- [`isPair(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types) -- [`isScalar(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types) -- [`isSeq(foo): boolean`](https://eemeli.org/yaml/#identifying-node-types) -- [`new Scalar(value)`](https://eemeli.org/yaml/#scalar-values) -- [`new YAMLMap()`](https://eemeli.org/yaml/#collections) -- [`new YAMLSeq()`](https://eemeli.org/yaml/#collections) -- [`doc.createAlias(node, name?): Alias`](https://eemeli.org/yaml/#working-with-anchors) -- [`doc.createNode(value, options?): Node`](https://eemeli.org/yaml/#creating-nodes) -- [`doc.createPair(key, value): Pair`](https://eemeli.org/yaml/#creating-nodes) -- [`visit(node, visitor)`](https://eemeli.org/yaml/#finding-and-modifying-nodes) - -### Parsing YAML - -- [`new Lexer().lex(src)`](https://eemeli.org/yaml/#lexer) -- [`new Parser(onNewLine?).parse(src)`](https://eemeli.org/yaml/#parser) -- [`new Composer(options?).compose(tokens)`](https://eemeli.org/yaml/#composer) - -## YAML.parse - -```yaml -# file.yml -YAML: - - A human-readable data serialization language - - https://en.wikipedia.org/wiki/YAML -yaml: - - A complete JavaScript implementation - - https://www.npmjs.com/package/yaml -``` - -```js -import fs from 'fs' -import YAML from 'yaml' - -YAML.parse('3.14159') -// 3.14159 - -YAML.parse('[ true, false, maybe, null ]\n') -// [ true, false, 'maybe', null ] - -const file = fs.readFileSync('./file.yml', 'utf8') -YAML.parse(file) -// { YAML: -// [ 'A human-readable data serialization language', -// 'https://en.wikipedia.org/wiki/YAML' ], -// yaml: -// [ 'A complete JavaScript implementation', -// 'https://www.npmjs.com/package/yaml' ] } -``` - -## YAML.stringify - -```js -import YAML from 'yaml' - -YAML.stringify(3.14159) -// '3.14159\n' - -YAML.stringify([true, false, 
'maybe', null]) -// `- true -// - false -// - maybe -// - null -// ` - -YAML.stringify({ number: 3, plain: 'string', block: 'two\nlines\n' }) -// `number: 3 -// plain: string -// block: | -// two -// lines -// ` -``` - ---- - -Browser testing provided by: - - -BrowserStack - diff --git a/bin/node_modules/yaml/bin.mjs b/bin/node_modules/yaml/bin.mjs deleted file mode 100755 index 7504ae1..0000000 --- a/bin/node_modules/yaml/bin.mjs +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env node - -import { UserError, cli, help } from './dist/cli.mjs' - -cli(process.stdin, error => { - if (error instanceof UserError) { - if (error.code === UserError.ARGS) console.error(`${help}\n`) - console.error(error.message) - process.exitCode = error.code - } else if (error) throw error -}) diff --git a/bin/node_modules/yaml/browser/dist/compose/compose-collection.js b/bin/node_modules/yaml/browser/dist/compose/compose-collection.js deleted file mode 100644 index 8d4507f..0000000 --- a/bin/node_modules/yaml/browser/dist/compose/compose-collection.js +++ /dev/null @@ -1,76 +0,0 @@ -import { isNode } from '../nodes/identity.js'; -import { Scalar } from '../nodes/Scalar.js'; -import { YAMLMap } from '../nodes/YAMLMap.js'; -import { YAMLSeq } from '../nodes/YAMLSeq.js'; -import { resolveBlockMap } from './resolve-block-map.js'; -import { resolveBlockSeq } from './resolve-block-seq.js'; -import { resolveFlowCollection } from './resolve-flow-collection.js'; - -function resolveCollection(CN, ctx, token, onError, tagName, tag) { - const coll = token.type === 'block-map' - ? resolveBlockMap(CN, ctx, token, onError, tag) - : token.type === 'block-seq' - ? resolveBlockSeq(CN, ctx, token, onError, tag) - : resolveFlowCollection(CN, ctx, token, onError, tag); - const Coll = coll.constructor; - // If we got a tagName matching the class, or the tag name is '!', - // then use the tagName from the node class used to create it. - if (tagName === '!' || tagName === Coll.tagName) { - coll.tag = Coll.tagName; - return coll; - } - if (tagName) - coll.tag = tagName; - return coll; -} -function composeCollection(CN, ctx, token, tagToken, onError) { - const tagName = !tagToken - ? null - : ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg)); - const expType = token.type === 'block-map' - ? 'map' - : token.type === 'block-seq' - ? 'seq' - : token.start.source === '{' - ? 'map' - : 'seq'; - // shortcut: check if it's a generic YAMLMap or YAMLSeq - // before jumping into the custom tag logic. - if (!tagToken || - !tagName || - tagName === '!' || - (tagName === YAMLMap.tagName && expType === 'map') || - (tagName === YAMLSeq.tagName && expType === 'seq') || - !expType) { - return resolveCollection(CN, ctx, token, onError, tagName); - } - let tag = ctx.schema.tags.find(t => t.tag === tagName && t.collection === expType); - if (!tag) { - const kt = ctx.schema.knownTags[tagName]; - if (kt && kt.collection === expType) { - ctx.schema.tags.push(Object.assign({}, kt, { default: false })); - tag = kt; - } - else { - if (kt?.collection) { - onError(tagToken, 'BAD_COLLECTION_TYPE', `${kt.tag} used for ${expType} collection, but expects ${kt.collection}`, true); - } - else { - onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true); - } - return resolveCollection(CN, ctx, token, onError, tagName); - } - } - const coll = resolveCollection(CN, ctx, token, onError, tagName, tag); - const res = tag.resolve?.(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options) ?? 
coll; - const node = isNode(res) - ? res - : new Scalar(res); - node.range = coll.range; - node.tag = tagName; - if (tag?.format) - node.format = tag.format; - return node; -} - -export { composeCollection }; diff --git a/bin/node_modules/yaml/browser/dist/compose/compose-doc.js b/bin/node_modules/yaml/browser/dist/compose/compose-doc.js deleted file mode 100644 index d36c380..0000000 --- a/bin/node_modules/yaml/browser/dist/compose/compose-doc.js +++ /dev/null @@ -1,41 +0,0 @@ -import { Document } from '../doc/Document.js'; -import { composeNode, composeEmptyNode } from './compose-node.js'; -import { resolveEnd } from './resolve-end.js'; -import { resolveProps } from './resolve-props.js'; - -function composeDoc(options, directives, { offset, start, value, end }, onError) { - const opts = Object.assign({ _directives: directives }, options); - const doc = new Document(undefined, opts); - const ctx = { - atRoot: true, - directives: doc.directives, - options: doc.options, - schema: doc.schema - }; - const props = resolveProps(start, { - indicator: 'doc-start', - next: value ?? end?.[0], - offset, - onError, - startOnNewline: true - }); - if (props.found) { - doc.directives.docStart = true; - if (value && - (value.type === 'block-map' || value.type === 'block-seq') && - !props.hasNewline) - onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker'); - } - // @ts-expect-error If Contents is set, let's trust the user - doc.contents = value - ? composeNode(ctx, value, props, onError) - : composeEmptyNode(ctx, props.end, start, null, props, onError); - const contentEnd = doc.contents.range[2]; - const re = resolveEnd(end, contentEnd, false, onError); - if (re.comment) - doc.comment = re.comment; - doc.range = [offset, contentEnd, re.offset]; - return doc; -} - -export { composeDoc }; diff --git a/bin/node_modules/yaml/browser/dist/compose/compose-node.js b/bin/node_modules/yaml/browser/dist/compose/compose-node.js deleted file mode 100644 index 39d98e3..0000000 --- a/bin/node_modules/yaml/browser/dist/compose/compose-node.js +++ /dev/null @@ -1,92 +0,0 @@ -import { Alias } from '../nodes/Alias.js'; -import { composeCollection } from './compose-collection.js'; -import { composeScalar } from './compose-scalar.js'; -import { resolveEnd } from './resolve-end.js'; -import { emptyScalarPosition } from './util-empty-scalar-position.js'; - -const CN = { composeNode, composeEmptyNode }; -function composeNode(ctx, token, props, onError) { - const { spaceBefore, comment, anchor, tag } = props; - let node; - let isSrcToken = true; - switch (token.type) { - case 'alias': - node = composeAlias(ctx, token, onError); - if (anchor || tag) - onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties'); - break; - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - case 'block-scalar': - node = composeScalar(ctx, token, tag, onError); - if (anchor) - node.anchor = anchor.source.substring(1); - break; - case 'block-map': - case 'block-seq': - case 'flow-collection': - node = composeCollection(CN, ctx, token, tag, onError); - if (anchor) - node.anchor = anchor.source.substring(1); - break; - default: { - const message = token.type === 'error' - ? 
token.message - : `Unsupported token (type: ${token.type})`; - onError(token, 'UNEXPECTED_TOKEN', message); - node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError); - isSrcToken = false; - } - } - if (anchor && node.anchor === '') - onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string'); - if (spaceBefore) - node.spaceBefore = true; - if (comment) { - if (token.type === 'scalar' && token.source === '') - node.comment = comment; - else - node.commentBefore = comment; - } - // @ts-expect-error Type checking misses meaning of isSrcToken - if (ctx.options.keepSourceTokens && isSrcToken) - node.srcToken = token; - return node; -} -function composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag, end }, onError) { - const token = { - type: 'scalar', - offset: emptyScalarPosition(offset, before, pos), - indent: -1, - source: '' - }; - const node = composeScalar(ctx, token, tag, onError); - if (anchor) { - node.anchor = anchor.source.substring(1); - if (node.anchor === '') - onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string'); - } - if (spaceBefore) - node.spaceBefore = true; - if (comment) { - node.comment = comment; - node.range[2] = end; - } - return node; -} -function composeAlias({ options }, { offset, source, end }, onError) { - const alias = new Alias(source.substring(1)); - if (alias.source === '') - onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string'); - if (alias.source.endsWith(':')) - onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true); - const valueEnd = offset + source.length; - const re = resolveEnd(end, valueEnd, options.strict, onError); - alias.range = [offset, valueEnd, re.offset]; - if (re.comment) - alias.comment = re.comment; - return alias; -} - -export { composeEmptyNode, composeNode }; diff --git a/bin/node_modules/yaml/browser/dist/compose/compose-scalar.js b/bin/node_modules/yaml/browser/dist/compose/compose-scalar.js deleted file mode 100644 index fb4447d..0000000 --- a/bin/node_modules/yaml/browser/dist/compose/compose-scalar.js +++ /dev/null @@ -1,80 +0,0 @@ -import { SCALAR, isScalar } from '../nodes/identity.js'; -import { Scalar } from '../nodes/Scalar.js'; -import { resolveBlockScalar } from './resolve-block-scalar.js'; -import { resolveFlowScalar } from './resolve-flow-scalar.js'; - -function composeScalar(ctx, token, tagToken, onError) { - const { value, type, comment, range } = token.type === 'block-scalar' - ? resolveBlockScalar(token, ctx.options.strict, onError) - : resolveFlowScalar(token, ctx.options.strict, onError); - const tagName = tagToken - ? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg)) - : null; - const tag = tagToken && tagName - ? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError) - : token.type === 'scalar' - ? findScalarTagByTest(ctx, value, token, onError) - : ctx.schema[SCALAR]; - let scalar; - try { - const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options); - scalar = isScalar(res) ? res : new Scalar(res); - } - catch (error) { - const msg = error instanceof Error ? error.message : String(error); - onError(tagToken ?? 
token, 'TAG_RESOLVE_FAILED', msg); - scalar = new Scalar(value); - } - scalar.range = range; - scalar.source = value; - if (type) - scalar.type = type; - if (tagName) - scalar.tag = tagName; - if (tag.format) - scalar.format = tag.format; - if (comment) - scalar.comment = comment; - return scalar; -} -function findScalarTagByName(schema, value, tagName, tagToken, onError) { - if (tagName === '!') - return schema[SCALAR]; // non-specific tag - const matchWithTest = []; - for (const tag of schema.tags) { - if (!tag.collection && tag.tag === tagName) { - if (tag.default && tag.test) - matchWithTest.push(tag); - else - return tag; - } - } - for (const tag of matchWithTest) - if (tag.test?.test(value)) - return tag; - const kt = schema.knownTags[tagName]; - if (kt && !kt.collection) { - // Ensure that the known tag is available for stringifying, - // but does not get used by default. - schema.tags.push(Object.assign({}, kt, { default: false, test: undefined })); - return kt; - } - onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str'); - return schema[SCALAR]; -} -function findScalarTagByTest({ directives, schema }, value, token, onError) { - const tag = schema.tags.find(tag => tag.default && tag.test?.test(value)) || schema[SCALAR]; - if (schema.compat) { - const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ?? - schema[SCALAR]; - if (tag.tag !== compat.tag) { - const ts = directives.tagString(tag.tag); - const cs = directives.tagString(compat.tag); - const msg = `Value may be parsed as either ${ts} or ${cs}`; - onError(token, 'TAG_RESOLVE_FAILED', msg, true); - } - } - return tag; -} - -export { composeScalar }; diff --git a/bin/node_modules/yaml/browser/dist/compose/composer.js b/bin/node_modules/yaml/browser/dist/compose/composer.js deleted file mode 100644 index 01b387f..0000000 --- a/bin/node_modules/yaml/browser/dist/compose/composer.js +++ /dev/null @@ -1,217 +0,0 @@ -import { Directives } from '../doc/directives.js'; -import { Document } from '../doc/Document.js'; -import { YAMLWarning, YAMLParseError } from '../errors.js'; -import { isCollection, isPair } from '../nodes/identity.js'; -import { composeDoc } from './compose-doc.js'; -import { resolveEnd } from './resolve-end.js'; - -function getErrorPos(src) { - if (typeof src === 'number') - return [src, src + 1]; - if (Array.isArray(src)) - return src.length === 2 ? src : [src[0], src[1]]; - const { offset, source } = src; - return [offset, offset + (typeof source === 'string' ? source.length : 1)]; -} -function parsePrelude(prelude) { - let comment = ''; - let atComment = false; - let afterEmptyLine = false; - for (let i = 0; i < prelude.length; ++i) { - const source = prelude[i]; - switch (source[0]) { - case '#': - comment += - (comment === '' ? '' : afterEmptyLine ? '\n\n' : '\n') + - (source.substring(1) || ' '); - atComment = true; - afterEmptyLine = false; - break; - case '%': - if (prelude[i + 1]?.[0] !== '#') - i += 1; - atComment = false; - break; - default: - // This may be wrong after doc-end, but in that case it doesn't matter - if (!atComment) - afterEmptyLine = true; - atComment = false; - } - } - return { comment, afterEmptyLine }; -} -/** - * Compose a stream of CST nodes into a stream of YAML Documents. - * - * ```ts - * import { Composer, Parser } from 'yaml' - * - * const src: string = ... 
- * const tokens = new Parser().parse(src) - * const docs = new Composer().compose(tokens) - * ``` - */ -class Composer { - constructor(options = {}) { - this.doc = null; - this.atDirectives = false; - this.prelude = []; - this.errors = []; - this.warnings = []; - this.onError = (source, code, message, warning) => { - const pos = getErrorPos(source); - if (warning) - this.warnings.push(new YAMLWarning(pos, code, message)); - else - this.errors.push(new YAMLParseError(pos, code, message)); - }; - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - this.directives = new Directives({ version: options.version || '1.2' }); - this.options = options; - } - decorate(doc, afterDoc) { - const { comment, afterEmptyLine } = parsePrelude(this.prelude); - //console.log({ dc: doc.comment, prelude, comment }) - if (comment) { - const dc = doc.contents; - if (afterDoc) { - doc.comment = doc.comment ? `${doc.comment}\n${comment}` : comment; - } - else if (afterEmptyLine || doc.directives.docStart || !dc) { - doc.commentBefore = comment; - } - else if (isCollection(dc) && !dc.flow && dc.items.length > 0) { - let it = dc.items[0]; - if (isPair(it)) - it = it.key; - const cb = it.commentBefore; - it.commentBefore = cb ? `${comment}\n${cb}` : comment; - } - else { - const cb = dc.commentBefore; - dc.commentBefore = cb ? `${comment}\n${cb}` : comment; - } - } - if (afterDoc) { - Array.prototype.push.apply(doc.errors, this.errors); - Array.prototype.push.apply(doc.warnings, this.warnings); - } - else { - doc.errors = this.errors; - doc.warnings = this.warnings; - } - this.prelude = []; - this.errors = []; - this.warnings = []; - } - /** - * Current stream status information. - * - * Mostly useful at the end of input for an empty stream. - */ - streamInfo() { - return { - comment: parsePrelude(this.prelude).comment, - directives: this.directives, - errors: this.errors, - warnings: this.warnings - }; - } - /** - * Compose tokens into documents. - * - * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. - * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. - */ - *compose(tokens, forceDoc = false, endOffset = -1) { - for (const token of tokens) - yield* this.next(token); - yield* this.end(forceDoc, endOffset); - } - /** Advance the composer by one CST token. */ - *next(token) { - switch (token.type) { - case 'directive': - this.directives.add(token.source, (offset, message, warning) => { - const pos = getErrorPos(token); - pos[0] += offset; - this.onError(pos, 'BAD_DIRECTIVE', message, warning); - }); - this.prelude.push(token.source); - this.atDirectives = true; - break; - case 'document': { - const doc = composeDoc(this.options, this.directives, token, this.onError); - if (this.atDirectives && !doc.directives.docStart) - this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line'); - this.decorate(doc, false); - if (this.doc) - yield this.doc; - this.doc = doc; - this.atDirectives = false; - break; - } - case 'byte-order-mark': - case 'space': - break; - case 'comment': - case 'newline': - this.prelude.push(token.source); - break; - case 'error': { - const msg = token.source - ? 
`${token.message}: ${JSON.stringify(token.source)}` - : token.message; - const error = new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg); - if (this.atDirectives || !this.doc) - this.errors.push(error); - else - this.doc.errors.push(error); - break; - } - case 'doc-end': { - if (!this.doc) { - const msg = 'Unexpected doc-end without preceding document'; - this.errors.push(new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg)); - break; - } - this.doc.directives.docEnd = true; - const end = resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError); - this.decorate(this.doc, true); - if (end.comment) { - const dc = this.doc.comment; - this.doc.comment = dc ? `${dc}\n${end.comment}` : end.comment; - } - this.doc.range[2] = end.offset; - break; - } - default: - this.errors.push(new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`)); - } - } - /** - * Call at end of input to yield any remaining document. - * - * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. - * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. - */ - *end(forceDoc = false, endOffset = -1) { - if (this.doc) { - this.decorate(this.doc, true); - yield this.doc; - this.doc = null; - } - else if (forceDoc) { - const opts = Object.assign({ _directives: this.directives }, this.options); - const doc = new Document(undefined, opts); - if (this.atDirectives) - this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line'); - doc.range = [0, endOffset, endOffset]; - this.decorate(doc, false); - yield doc; - } - } -} - -export { Composer }; diff --git a/bin/node_modules/yaml/browser/dist/compose/resolve-block-map.js b/bin/node_modules/yaml/browser/dist/compose/resolve-block-map.js deleted file mode 100644 index 679b3de..0000000 --- a/bin/node_modules/yaml/browser/dist/compose/resolve-block-map.js +++ /dev/null @@ -1,111 +0,0 @@ -import { Pair } from '../nodes/Pair.js'; -import { YAMLMap } from '../nodes/YAMLMap.js'; -import { resolveProps } from './resolve-props.js'; -import { containsNewline } from './util-contains-newline.js'; -import { flowIndentCheck } from './util-flow-indent-check.js'; -import { mapIncludes } from './util-map-includes.js'; - -const startColMsg = 'All mapping items must start at the same column'; -function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError, tag) { - const NodeClass = tag?.nodeClass ?? YAMLMap; - const map = new NodeClass(ctx.schema); - if (ctx.atRoot) - ctx.atRoot = false; - let offset = bm.offset; - let commentEnd = null; - for (const collItem of bm.items) { - const { start, key, sep, value } = collItem; - // key properties - const keyProps = resolveProps(start, { - indicator: 'explicit-key-ind', - next: key ?? 
sep?.[0], - offset, - onError, - startOnNewline: true - }); - const implicitKey = !keyProps.found; - if (implicitKey) { - if (key) { - if (key.type === 'block-seq') - onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key'); - else if ('indent' in key && key.indent !== bm.indent) - onError(offset, 'BAD_INDENT', startColMsg); - } - if (!keyProps.anchor && !keyProps.tag && !sep) { - commentEnd = keyProps.end; - if (keyProps.comment) { - if (map.comment) - map.comment += '\n' + keyProps.comment; - else - map.comment = keyProps.comment; - } - continue; - } - if (keyProps.hasNewlineAfterProp || containsNewline(key)) { - onError(key ?? start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line'); - } - } - else if (keyProps.found?.indent !== bm.indent) { - onError(offset, 'BAD_INDENT', startColMsg); - } - // key value - const keyStart = keyProps.end; - const keyNode = key - ? composeNode(ctx, key, keyProps, onError) - : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError); - if (ctx.schema.compat) - flowIndentCheck(bm.indent, key, onError); - if (mapIncludes(ctx, map.items, keyNode)) - onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique'); - // value properties - const valueProps = resolveProps(sep ?? [], { - indicator: 'map-value-ind', - next: value, - offset: keyNode.range[2], - onError, - startOnNewline: !key || key.type === 'block-scalar' - }); - offset = valueProps.end; - if (valueProps.found) { - if (implicitKey) { - if (value?.type === 'block-map' && !valueProps.hasNewline) - onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings'); - if (ctx.options.strict && - keyProps.start < valueProps.found.offset - 1024) - onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key'); - } - // value value - const valueNode = value - ? composeNode(ctx, value, valueProps, onError) - : composeEmptyNode(ctx, offset, sep, null, valueProps, onError); - if (ctx.schema.compat) - flowIndentCheck(bm.indent, value, onError); - offset = valueNode.range[2]; - const pair = new Pair(keyNode, valueNode); - if (ctx.options.keepSourceTokens) - pair.srcToken = collItem; - map.items.push(pair); - } - else { - // key with no value - if (implicitKey) - onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values'); - if (valueProps.comment) { - if (keyNode.comment) - keyNode.comment += '\n' + valueProps.comment; - else - keyNode.comment = valueProps.comment; - } - const pair = new Pair(keyNode); - if (ctx.options.keepSourceTokens) - pair.srcToken = collItem; - map.items.push(pair); - } - } - if (commentEnd && commentEnd < offset) - onError(commentEnd, 'IMPOSSIBLE', 'Map comment with trailing content'); - map.range = [bm.offset, offset, commentEnd ?? 
offset]; - return map; -} - -export { resolveBlockMap }; diff --git a/bin/node_modules/yaml/browser/dist/compose/resolve-block-scalar.js b/bin/node_modules/yaml/browser/dist/compose/resolve-block-scalar.js deleted file mode 100644 index 592d807..0000000 --- a/bin/node_modules/yaml/browser/dist/compose/resolve-block-scalar.js +++ /dev/null @@ -1,194 +0,0 @@ -import { Scalar } from '../nodes/Scalar.js'; - -function resolveBlockScalar(scalar, strict, onError) { - const start = scalar.offset; - const header = parseBlockScalarHeader(scalar, strict, onError); - if (!header) - return { value: '', type: null, comment: '', range: [start, start, start] }; - const type = header.mode === '>' ? Scalar.BLOCK_FOLDED : Scalar.BLOCK_LITERAL; - const lines = scalar.source ? splitLines(scalar.source) : []; - // determine the end of content & start of chomping - let chompStart = lines.length; - for (let i = lines.length - 1; i >= 0; --i) { - const content = lines[i][1]; - if (content === '' || content === '\r') - chompStart = i; - else - break; - } - // shortcut for empty contents - if (chompStart === 0) { - const value = header.chomp === '+' && lines.length > 0 - ? '\n'.repeat(Math.max(1, lines.length - 1)) - : ''; - let end = start + header.length; - if (scalar.source) - end += scalar.source.length; - return { value, type, comment: header.comment, range: [start, end, end] }; - } - // find the indentation level to trim from start - let trimIndent = scalar.indent + header.indent; - let offset = scalar.offset + header.length; - let contentStart = 0; - for (let i = 0; i < chompStart; ++i) { - const [indent, content] = lines[i]; - if (content === '' || content === '\r') { - if (header.indent === 0 && indent.length > trimIndent) - trimIndent = indent.length; - } - else { - if (indent.length < trimIndent) { - const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator'; - onError(offset + indent.length, 'MISSING_CHAR', message); - } - if (header.indent === 0) - trimIndent = indent.length; - contentStart = i; - break; - } - offset += indent.length + content.length + 1; - } - // include trailing more-indented empty lines in content - for (let i = lines.length - 1; i >= chompStart; --i) { - if (lines[i][0].length > trimIndent) - chompStart = i + 1; - } - let value = ''; - let sep = ''; - let prevMoreIndented = false; - // leading whitespace is kept intact - for (let i = 0; i < contentStart; ++i) - value += lines[i][0].slice(trimIndent) + '\n'; - for (let i = contentStart; i < chompStart; ++i) { - let [indent, content] = lines[i]; - offset += indent.length + content.length + 1; - const crlf = content[content.length - 1] === '\r'; - if (crlf) - content = content.slice(0, -1); - /* istanbul ignore if already caught in lexer */ - if (content && indent.length < trimIndent) { - const src = header.indent - ? 'explicit indentation indicator' - : 'first line'; - const message = `Block scalar lines must not be less indented than their ${src}`; - onError(offset - content.length - (crlf ? 
2 : 1), 'BAD_INDENT', message); - indent = ''; - } - if (type === Scalar.BLOCK_LITERAL) { - value += sep + indent.slice(trimIndent) + content; - sep = '\n'; - } - else if (indent.length > trimIndent || content[0] === '\t') { - // more-indented content within a folded block - if (sep === ' ') - sep = '\n'; - else if (!prevMoreIndented && sep === '\n') - sep = '\n\n'; - value += sep + indent.slice(trimIndent) + content; - sep = '\n'; - prevMoreIndented = true; - } - else if (content === '') { - // empty line - if (sep === '\n') - value += '\n'; - else - sep = '\n'; - } - else { - value += sep + content; - sep = ' '; - prevMoreIndented = false; - } - } - switch (header.chomp) { - case '-': - break; - case '+': - for (let i = chompStart; i < lines.length; ++i) - value += '\n' + lines[i][0].slice(trimIndent); - if (value[value.length - 1] !== '\n') - value += '\n'; - break; - default: - value += '\n'; - } - const end = start + header.length + scalar.source.length; - return { value, type, comment: header.comment, range: [start, end, end] }; -} -function parseBlockScalarHeader({ offset, props }, strict, onError) { - /* istanbul ignore if should not happen */ - if (props[0].type !== 'block-scalar-header') { - onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found'); - return null; - } - const { source } = props[0]; - const mode = source[0]; - let indent = 0; - let chomp = ''; - let error = -1; - for (let i = 1; i < source.length; ++i) { - const ch = source[i]; - if (!chomp && (ch === '-' || ch === '+')) - chomp = ch; - else { - const n = Number(ch); - if (!indent && n) - indent = n; - else if (error === -1) - error = offset + i; - } - } - if (error !== -1) - onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`); - let hasSpace = false; - let comment = ''; - let length = source.length; - for (let i = 1; i < props.length; ++i) { - const token = props[i]; - switch (token.type) { - case 'space': - hasSpace = true; - // fallthrough - case 'newline': - length += token.source.length; - break; - case 'comment': - if (strict && !hasSpace) { - const message = 'Comments must be separated from other tokens by white space characters'; - onError(token, 'MISSING_CHAR', message); - } - length += token.source.length; - comment = token.source.substring(1); - break; - case 'error': - onError(token, 'UNEXPECTED_TOKEN', token.message); - length += token.source.length; - break; - /* istanbul ignore next should not happen */ - default: { - const message = `Unexpected token in block scalar header: ${token.type}`; - onError(token, 'UNEXPECTED_TOKEN', message); - const ts = token.source; - if (ts && typeof ts === 'string') - length += ts.length; - } - } - } - return { mode, indent, chomp, comment, length }; -} -/** @returns Array of lines split up as `[indent, content]` */ -function splitLines(source) { - const split = source.split(/\n( *)/); - const first = split[0]; - const m = first.match(/^( *)/); - const line0 = m?.[1] - ? 
[m[1], first.slice(m[1].length)] - : ['', first]; - const lines = [line0]; - for (let i = 1; i < split.length; i += 2) - lines.push([split[i], split[i + 1]]); - return lines; -} - -export { resolveBlockScalar }; diff --git a/bin/node_modules/yaml/browser/dist/compose/resolve-block-seq.js b/bin/node_modules/yaml/browser/dist/compose/resolve-block-seq.js deleted file mode 100644 index 3241ca7..0000000 --- a/bin/node_modules/yaml/browser/dist/compose/resolve-block-seq.js +++ /dev/null @@ -1,46 +0,0 @@ -import { YAMLSeq } from '../nodes/YAMLSeq.js'; -import { resolveProps } from './resolve-props.js'; -import { flowIndentCheck } from './util-flow-indent-check.js'; - -function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError, tag) { - const NodeClass = tag?.nodeClass ?? YAMLSeq; - const seq = new NodeClass(ctx.schema); - if (ctx.atRoot) - ctx.atRoot = false; - let offset = bs.offset; - let commentEnd = null; - for (const { start, value } of bs.items) { - const props = resolveProps(start, { - indicator: 'seq-item-ind', - next: value, - offset, - onError, - startOnNewline: true - }); - if (!props.found) { - if (props.anchor || props.tag || value) { - if (value && value.type === 'block-seq') - onError(props.end, 'BAD_INDENT', 'All sequence items must start at the same column'); - else - onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator'); - } - else { - commentEnd = props.end; - if (props.comment) - seq.comment = props.comment; - continue; - } - } - const node = value - ? composeNode(ctx, value, props, onError) - : composeEmptyNode(ctx, props.end, start, null, props, onError); - if (ctx.schema.compat) - flowIndentCheck(bs.indent, value, onError); - offset = node.range[2]; - seq.items.push(node); - } - seq.range = [bs.offset, offset, commentEnd ?? 
offset]; - return seq; -} - -export { resolveBlockSeq }; diff --git a/bin/node_modules/yaml/browser/dist/compose/resolve-end.js b/bin/node_modules/yaml/browser/dist/compose/resolve-end.js deleted file mode 100644 index d5c65d7..0000000 --- a/bin/node_modules/yaml/browser/dist/compose/resolve-end.js +++ /dev/null @@ -1,37 +0,0 @@ -function resolveEnd(end, offset, reqSpace, onError) { - let comment = ''; - if (end) { - let hasSpace = false; - let sep = ''; - for (const token of end) { - const { source, type } = token; - switch (type) { - case 'space': - hasSpace = true; - break; - case 'comment': { - if (reqSpace && !hasSpace) - onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters'); - const cb = source.substring(1) || ' '; - if (!comment) - comment = cb; - else - comment += sep + cb; - sep = ''; - break; - } - case 'newline': - if (comment) - sep += source; - hasSpace = true; - break; - default: - onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`); - } - offset += source.length; - } - } - return { comment, offset }; -} - -export { resolveEnd }; diff --git a/bin/node_modules/yaml/browser/dist/compose/resolve-flow-collection.js b/bin/node_modules/yaml/browser/dist/compose/resolve-flow-collection.js deleted file mode 100644 index fdf929a..0000000 --- a/bin/node_modules/yaml/browser/dist/compose/resolve-flow-collection.js +++ /dev/null @@ -1,199 +0,0 @@ -import { isPair } from '../nodes/identity.js'; -import { Pair } from '../nodes/Pair.js'; -import { YAMLMap } from '../nodes/YAMLMap.js'; -import { YAMLSeq } from '../nodes/YAMLSeq.js'; -import { resolveEnd } from './resolve-end.js'; -import { resolveProps } from './resolve-props.js'; -import { containsNewline } from './util-contains-newline.js'; -import { mapIncludes } from './util-map-includes.js'; - -const blockMsg = 'Block collections are not allowed within flow collections'; -const isBlock = (token) => token && (token.type === 'block-map' || token.type === 'block-seq'); -function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError, tag) { - const isMap = fc.start.source === '{'; - const fcName = isMap ? 'flow map' : 'flow sequence'; - const NodeClass = (tag?.nodeClass ?? (isMap ? YAMLMap : YAMLSeq)); - const coll = new NodeClass(ctx.schema); - coll.flow = true; - const atRoot = ctx.atRoot; - if (atRoot) - ctx.atRoot = false; - let offset = fc.offset + fc.start.source.length; - for (let i = 0; i < fc.items.length; ++i) { - const collItem = fc.items[i]; - const { start, key, sep, value } = collItem; - const props = resolveProps(start, { - flow: fcName, - indicator: 'explicit-key-ind', - next: key ?? 
sep?.[0], - offset, - onError, - startOnNewline: false - }); - if (!props.found) { - if (!props.anchor && !props.tag && !sep && !value) { - if (i === 0 && props.comma) - onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`); - else if (i < fc.items.length - 1) - onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`); - if (props.comment) { - if (coll.comment) - coll.comment += '\n' + props.comment; - else - coll.comment = props.comment; - } - offset = props.end; - continue; - } - if (!isMap && ctx.options.strict && containsNewline(key)) - onError(key, // checked by containsNewline() - 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line'); - } - if (i === 0) { - if (props.comma) - onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`); - } - else { - if (!props.comma) - onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`); - if (props.comment) { - let prevItemComment = ''; - loop: for (const st of start) { - switch (st.type) { - case 'comma': - case 'space': - break; - case 'comment': - prevItemComment = st.source.substring(1); - break loop; - default: - break loop; - } - } - if (prevItemComment) { - let prev = coll.items[coll.items.length - 1]; - if (isPair(prev)) - prev = prev.value ?? prev.key; - if (prev.comment) - prev.comment += '\n' + prevItemComment; - else - prev.comment = prevItemComment; - props.comment = props.comment.substring(prevItemComment.length + 1); - } - } - } - if (!isMap && !sep && !props.found) { - // item is a value in a seq - // → key & sep are empty, start does not include ? or : - const valueNode = value - ? composeNode(ctx, value, props, onError) - : composeEmptyNode(ctx, props.end, sep, null, props, onError); - coll.items.push(valueNode); - offset = valueNode.range[2]; - if (isBlock(value)) - onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg); - } - else { - // item is a key+value pair - // key value - const keyStart = props.end; - const keyNode = key - ? composeNode(ctx, key, props, onError) - : composeEmptyNode(ctx, keyStart, start, null, props, onError); - if (isBlock(key)) - onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg); - // value properties - const valueProps = resolveProps(sep ?? [], { - flow: fcName, - indicator: 'map-value-ind', - next: value, - offset: keyNode.range[2], - onError, - startOnNewline: false - }); - if (valueProps.found) { - if (!isMap && !props.found && ctx.options.strict) { - if (sep) - for (const st of sep) { - if (st === valueProps.found) - break; - if (st.type === 'newline') { - onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line'); - break; - } - } - if (props.start < valueProps.found.offset - 1024) - onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key'); - } - } - else if (value) { - if ('source' in value && value.source && value.source[0] === ':') - onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`); - else - onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`); - } - // value value - const valueNode = value - ? composeNode(ctx, value, valueProps, onError) - : valueProps.found - ? 
composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError) - : null; - if (valueNode) { - if (isBlock(value)) - onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg); - } - else if (valueProps.comment) { - if (keyNode.comment) - keyNode.comment += '\n' + valueProps.comment; - else - keyNode.comment = valueProps.comment; - } - const pair = new Pair(keyNode, valueNode); - if (ctx.options.keepSourceTokens) - pair.srcToken = collItem; - if (isMap) { - const map = coll; - if (mapIncludes(ctx, map.items, keyNode)) - onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique'); - map.items.push(pair); - } - else { - const map = new YAMLMap(ctx.schema); - map.flow = true; - map.items.push(pair); - coll.items.push(map); - } - offset = valueNode ? valueNode.range[2] : valueProps.end; - } - } - const expectedEnd = isMap ? '}' : ']'; - const [ce, ...ee] = fc.end; - let cePos = offset; - if (ce && ce.source === expectedEnd) - cePos = ce.offset + ce.source.length; - else { - const name = fcName[0].toUpperCase() + fcName.substring(1); - const msg = atRoot - ? `${name} must end with a ${expectedEnd}` - : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`; - onError(offset, atRoot ? 'MISSING_CHAR' : 'BAD_INDENT', msg); - if (ce && ce.source.length !== 1) - ee.unshift(ce); - } - if (ee.length > 0) { - const end = resolveEnd(ee, cePos, ctx.options.strict, onError); - if (end.comment) { - if (coll.comment) - coll.comment += '\n' + end.comment; - else - coll.comment = end.comment; - } - coll.range = [fc.offset, cePos, end.offset]; - } - else { - coll.range = [fc.offset, cePos, cePos]; - } - return coll; -} - -export { resolveFlowCollection }; diff --git a/bin/node_modules/yaml/browser/dist/compose/resolve-flow-scalar.js b/bin/node_modules/yaml/browser/dist/compose/resolve-flow-scalar.js deleted file mode 100644 index ebb9fd4..0000000 --- a/bin/node_modules/yaml/browser/dist/compose/resolve-flow-scalar.js +++ /dev/null @@ -1,223 +0,0 @@ -import { Scalar } from '../nodes/Scalar.js'; -import { resolveEnd } from './resolve-end.js'; - -function resolveFlowScalar(scalar, strict, onError) { - const { offset, type, source, end } = scalar; - let _type; - let value; - const _onError = (rel, code, msg) => onError(offset + rel, code, msg); - switch (type) { - case 'scalar': - _type = Scalar.PLAIN; - value = plainValue(source, _onError); - break; - case 'single-quoted-scalar': - _type = Scalar.QUOTE_SINGLE; - value = singleQuotedValue(source, _onError); - break; - case 'double-quoted-scalar': - _type = Scalar.QUOTE_DOUBLE; - value = doubleQuotedValue(source, _onError); - break; - /* istanbul ignore next should not happen */ - default: - onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`); - return { - value: '', - type: null, - comment: '', - range: [offset, offset + source.length, offset + source.length] - }; - } - const valueEnd = offset + source.length; - const re = resolveEnd(end, valueEnd, strict, onError); - return { - value, - type: _type, - comment: re.comment, - range: [offset, valueEnd, re.offset] - }; -} -function plainValue(source, onError) { - let badChar = ''; - switch (source[0]) { - /* istanbul ignore next should not happen */ - case '\t': - badChar = 'a tab character'; - break; - case ',': - badChar = 'flow indicator character ,'; - break; - case '%': - badChar = 'directive indicator character %'; - break; - case '|': - case '>': { - badChar = `block scalar indicator ${source[0]}`; - break; - } - case '@': - case '`': { - 
badChar = `reserved character ${source[0]}`; - break; - } - } - if (badChar) - onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`); - return foldLines(source); -} -function singleQuotedValue(source, onError) { - if (source[source.length - 1] !== "'" || source.length === 1) - onError(source.length, 'MISSING_CHAR', "Missing closing 'quote"); - return foldLines(source.slice(1, -1)).replace(/''/g, "'"); -} -function foldLines(source) { - /** - * The negative lookbehind here and in the `re` RegExp is to - * prevent causing a polynomial search time in certain cases. - * - * The try-catch is for Safari, which doesn't support this yet: - * https://caniuse.com/js-regexp-lookbehind - */ - let first, line; - try { - first = new RegExp('(.*?)(? wsStart ? source.slice(wsStart, i + 1) : ch; - } - else { - res += ch; - } - } - if (source[source.length - 1] !== '"' || source.length === 1) - onError(source.length, 'MISSING_CHAR', 'Missing closing "quote'); - return res; -} -/** - * Fold a single newline into a space, multiple newlines to N - 1 newlines. - * Presumes `source[offset] === '\n'` - */ -function foldNewline(source, offset) { - let fold = ''; - let ch = source[offset + 1]; - while (ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r') { - if (ch === '\r' && source[offset + 2] !== '\n') - break; - if (ch === '\n') - fold += '\n'; - offset += 1; - ch = source[offset + 1]; - } - if (!fold) - fold = ' '; - return { fold, offset }; -} -const escapeCodes = { - '0': '\0', // null character - a: '\x07', // bell character - b: '\b', // backspace - e: '\x1b', // escape character - f: '\f', // form feed - n: '\n', // line feed - r: '\r', // carriage return - t: '\t', // horizontal tab - v: '\v', // vertical tab - N: '\u0085', // Unicode next line - _: '\u00a0', // Unicode non-breaking space - L: '\u2028', // Unicode line separator - P: '\u2029', // Unicode paragraph separator - ' ': ' ', - '"': '"', - '/': '/', - '\\': '\\', - '\t': '\t' -}; -function parseCharCode(source, offset, length, onError) { - const cc = source.substr(offset, length); - const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc); - const code = ok ? parseInt(cc, 16) : NaN; - if (isNaN(code)) { - const raw = source.substr(offset - 2, length + 2); - onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`); - return raw; - } - return String.fromCodePoint(code); -} - -export { resolveFlowScalar }; diff --git a/bin/node_modules/yaml/browser/dist/compose/resolve-props.js b/bin/node_modules/yaml/browser/dist/compose/resolve-props.js deleted file mode 100644 index ab30f56..0000000 --- a/bin/node_modules/yaml/browser/dist/compose/resolve-props.js +++ /dev/null @@ -1,134 +0,0 @@ -function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnNewline }) { - let spaceBefore = false; - let atNewline = startOnNewline; - let hasSpace = startOnNewline; - let comment = ''; - let commentSep = ''; - let hasNewline = false; - let hasNewlineAfterProp = false; - let reqSpace = false; - let anchor = null; - let tag = null; - let comma = null; - let found = null; - let start = null; - for (const token of tokens) { - if (reqSpace) { - if (token.type !== 'space' && - token.type !== 'newline' && - token.type !== 'comma') - onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space'); - reqSpace = false; - } - switch (token.type) { - case 'space': - // At the doc level, tabs at line start may be parsed - // as leading white space rather than indentation. 
- // In a flow collection, only the parser handles indent. - if (!flow && - atNewline && - indicator !== 'doc-start' && - token.source[0] === '\t') - onError(token, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation'); - hasSpace = true; - break; - case 'comment': { - if (!hasSpace) - onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters'); - const cb = token.source.substring(1) || ' '; - if (!comment) - comment = cb; - else - comment += commentSep + cb; - commentSep = ''; - atNewline = false; - break; - } - case 'newline': - if (atNewline) { - if (comment) - comment += token.source; - else - spaceBefore = true; - } - else - commentSep += token.source; - atNewline = true; - hasNewline = true; - if (anchor || tag) - hasNewlineAfterProp = true; - hasSpace = true; - break; - case 'anchor': - if (anchor) - onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor'); - if (token.source.endsWith(':')) - onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true); - anchor = token; - if (start === null) - start = token.offset; - atNewline = false; - hasSpace = false; - reqSpace = true; - break; - case 'tag': { - if (tag) - onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag'); - tag = token; - if (start === null) - start = token.offset; - atNewline = false; - hasSpace = false; - reqSpace = true; - break; - } - case indicator: - // Could here handle preceding comments differently - if (anchor || tag) - onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`); - if (found) - onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`); - found = token; - atNewline = false; - hasSpace = false; - break; - case 'comma': - if (flow) { - if (comma) - onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`); - comma = token; - atNewline = false; - hasSpace = false; - break; - } - // else fallthrough - default: - onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`); - atNewline = false; - hasSpace = false; - } - } - const last = tokens[tokens.length - 1]; - const end = last ? last.offset + last.source.length : offset; - if (reqSpace && - next && - next.type !== 'space' && - next.type !== 'newline' && - next.type !== 'comma' && - (next.type !== 'scalar' || next.source !== '')) - onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space'); - return { - comma, - found, - spaceBefore, - comment, - hasNewline, - hasNewlineAfterProp, - anchor, - tag, - end, - start: start ?? 
end - }; -} - -export { resolveProps }; diff --git a/bin/node_modules/yaml/browser/dist/compose/util-contains-newline.js b/bin/node_modules/yaml/browser/dist/compose/util-contains-newline.js deleted file mode 100644 index 2d65390..0000000 --- a/bin/node_modules/yaml/browser/dist/compose/util-contains-newline.js +++ /dev/null @@ -1,34 +0,0 @@ -function containsNewline(key) { - if (!key) - return null; - switch (key.type) { - case 'alias': - case 'scalar': - case 'double-quoted-scalar': - case 'single-quoted-scalar': - if (key.source.includes('\n')) - return true; - if (key.end) - for (const st of key.end) - if (st.type === 'newline') - return true; - return false; - case 'flow-collection': - for (const it of key.items) { - for (const st of it.start) - if (st.type === 'newline') - return true; - if (it.sep) - for (const st of it.sep) - if (st.type === 'newline') - return true; - if (containsNewline(it.key) || containsNewline(it.value)) - return true; - } - return false; - default: - return true; - } -} - -export { containsNewline }; diff --git a/bin/node_modules/yaml/browser/dist/compose/util-empty-scalar-position.js b/bin/node_modules/yaml/browser/dist/compose/util-empty-scalar-position.js deleted file mode 100644 index ab6e0c9..0000000 --- a/bin/node_modules/yaml/browser/dist/compose/util-empty-scalar-position.js +++ /dev/null @@ -1,27 +0,0 @@ -function emptyScalarPosition(offset, before, pos) { - if (before) { - if (pos === null) - pos = before.length; - for (let i = pos - 1; i >= 0; --i) { - let st = before[i]; - switch (st.type) { - case 'space': - case 'comment': - case 'newline': - offset -= st.source.length; - continue; - } - // Technically, an empty scalar is immediately after the last non-empty - // node, but it's more useful to place it after any whitespace. - st = before[++i]; - while (st?.type === 'space') { - offset += st.source.length; - st = before[++i]; - } - break; - } - } - return offset; -} - -export { emptyScalarPosition }; diff --git a/bin/node_modules/yaml/browser/dist/compose/util-flow-indent-check.js b/bin/node_modules/yaml/browser/dist/compose/util-flow-indent-check.js deleted file mode 100644 index c20e670..0000000 --- a/bin/node_modules/yaml/browser/dist/compose/util-flow-indent-check.js +++ /dev/null @@ -1,15 +0,0 @@ -import { containsNewline } from './util-contains-newline.js'; - -function flowIndentCheck(indent, fc, onError) { - if (fc?.type === 'flow-collection') { - const end = fc.end[0]; - if (end.indent === indent && - (end.source === ']' || end.source === '}') && - containsNewline(fc)) { - const msg = 'Flow end indicator should be more indented than parent'; - onError(end, 'BAD_INDENT', msg, true); - } - } -} - -export { flowIndentCheck }; diff --git a/bin/node_modules/yaml/browser/dist/compose/util-map-includes.js b/bin/node_modules/yaml/browser/dist/compose/util-map-includes.js deleted file mode 100644 index 4e7c269..0000000 --- a/bin/node_modules/yaml/browser/dist/compose/util-map-includes.js +++ /dev/null @@ -1,17 +0,0 @@ -import { isScalar } from '../nodes/identity.js'; - -function mapIncludes(ctx, items, search) { - const { uniqueKeys } = ctx.options; - if (uniqueKeys === false) - return false; - const isEqual = typeof uniqueKeys === 'function' - ? 
uniqueKeys - : (a, b) => a === b || - (isScalar(a) && - isScalar(b) && - a.value === b.value && - !(a.value === '<<' && ctx.schema.merge)); - return items.some(pair => isEqual(pair.key, search)); -} - -export { mapIncludes }; diff --git a/bin/node_modules/yaml/browser/dist/doc/Document.js b/bin/node_modules/yaml/browser/dist/doc/Document.js deleted file mode 100644 index ad63426..0000000 --- a/bin/node_modules/yaml/browser/dist/doc/Document.js +++ /dev/null @@ -1,334 +0,0 @@ -import { Alias } from '../nodes/Alias.js'; -import { isEmptyPath, collectionFromPath } from '../nodes/Collection.js'; -import { NODE_TYPE, DOC, isNode, isCollection, isScalar } from '../nodes/identity.js'; -import { Pair } from '../nodes/Pair.js'; -import { toJS } from '../nodes/toJS.js'; -import { Schema } from '../schema/Schema.js'; -import { stringifyDocument } from '../stringify/stringifyDocument.js'; -import { anchorNames, findNewAnchor, createNodeAnchors } from './anchors.js'; -import { applyReviver } from './applyReviver.js'; -import { createNode } from './createNode.js'; -import { Directives } from './directives.js'; - -class Document { - constructor(value, replacer, options) { - /** A comment before this Document */ - this.commentBefore = null; - /** A comment immediately after this Document */ - this.comment = null; - /** Errors encountered during parsing. */ - this.errors = []; - /** Warnings encountered during parsing. */ - this.warnings = []; - Object.defineProperty(this, NODE_TYPE, { value: DOC }); - let _replacer = null; - if (typeof replacer === 'function' || Array.isArray(replacer)) { - _replacer = replacer; - } - else if (options === undefined && replacer) { - options = replacer; - replacer = undefined; - } - const opt = Object.assign({ - intAsBigInt: false, - keepSourceTokens: false, - logLevel: 'warn', - prettyErrors: true, - strict: true, - uniqueKeys: true, - version: '1.2' - }, options); - this.options = opt; - let { version } = opt; - if (options?._directives) { - this.directives = options._directives.atDocument(); - if (this.directives.yaml.explicit) - version = this.directives.yaml.version; - } - else - this.directives = new Directives({ version }); - this.setSchema(version, options); - // @ts-expect-error We can't really know that this matches Contents. - this.contents = - value === undefined ? null : this.createNode(value, _replacer, options); - } - /** - * Create a deep copy of this Document and its contents. - * - * Custom Node values that inherit from `Object` still refer to their original instances. - */ - clone() { - const copy = Object.create(Document.prototype, { - [NODE_TYPE]: { value: DOC } - }); - copy.commentBefore = this.commentBefore; - copy.comment = this.comment; - copy.errors = this.errors.slice(); - copy.warnings = this.warnings.slice(); - copy.options = Object.assign({}, this.options); - if (this.directives) - copy.directives = this.directives.clone(); - copy.schema = this.schema.clone(); - // @ts-expect-error We can't really know that this matches Contents. - copy.contents = isNode(this.contents) - ? this.contents.clone(copy.schema) - : this.contents; - if (this.range) - copy.range = this.range.slice(); - return copy; - } - /** Adds a value to the document. */ - add(value) { - if (assertCollection(this.contents)) - this.contents.add(value); - } - /** Adds a value to the document. 
*/ - addIn(path, value) { - if (assertCollection(this.contents)) - this.contents.addIn(path, value); - } - /** - * Create a new `Alias` node, ensuring that the target `node` has the required anchor. - * - * If `node` already has an anchor, `name` is ignored. - * Otherwise, the `node.anchor` value will be set to `name`, - * or if an anchor with that name is already present in the document, - * `name` will be used as a prefix for a new unique anchor. - * If `name` is undefined, the generated anchor will use 'a' as a prefix. - */ - createAlias(node, name) { - if (!node.anchor) { - const prev = anchorNames(this); - node.anchor = - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - !name || prev.has(name) ? findNewAnchor(name || 'a', prev) : name; - } - return new Alias(node.anchor); - } - createNode(value, replacer, options) { - let _replacer = undefined; - if (typeof replacer === 'function') { - value = replacer.call({ '': value }, '', value); - _replacer = replacer; - } - else if (Array.isArray(replacer)) { - const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number; - const asStr = replacer.filter(keyToStr).map(String); - if (asStr.length > 0) - replacer = replacer.concat(asStr); - _replacer = replacer; - } - else if (options === undefined && replacer) { - options = replacer; - replacer = undefined; - } - const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {}; - const { onAnchor, setAnchors, sourceObjects } = createNodeAnchors(this, - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - anchorPrefix || 'a'); - const ctx = { - aliasDuplicateObjects: aliasDuplicateObjects ?? true, - keepUndefined: keepUndefined ?? false, - onAnchor, - onTagObj, - replacer: _replacer, - schema: this.schema, - sourceObjects - }; - const node = createNode(value, tag, ctx); - if (flow && isCollection(node)) - node.flow = true; - setAnchors(); - return node; - } - /** - * Convert a key and a value into a `Pair` using the current schema, - * recursively wrapping all values as `Scalar` or `Collection` nodes. - */ - createPair(key, value, options = {}) { - const k = this.createNode(key, null, options); - const v = this.createNode(value, null, options); - return new Pair(k, v); - } - /** - * Removes a value from the document. - * @returns `true` if the item was found and removed. - */ - delete(key) { - return assertCollection(this.contents) ? this.contents.delete(key) : false; - } - /** - * Removes a value from the document. - * @returns `true` if the item was found and removed. - */ - deleteIn(path) { - if (isEmptyPath(path)) { - if (this.contents == null) - return false; - // @ts-expect-error Presumed impossible if Strict extends false - this.contents = null; - return true; - } - return assertCollection(this.contents) - ? this.contents.deleteIn(path) - : false; - } - /** - * Returns item at `key`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - get(key, keepScalar) { - return isCollection(this.contents) - ? this.contents.get(key, keepScalar) - : undefined; - } - /** - * Returns item at `path`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). 
- */ - getIn(path, keepScalar) { - if (isEmptyPath(path)) - return !keepScalar && isScalar(this.contents) - ? this.contents.value - : this.contents; - return isCollection(this.contents) - ? this.contents.getIn(path, keepScalar) - : undefined; - } - /** - * Checks if the document includes a value with the key `key`. - */ - has(key) { - return isCollection(this.contents) ? this.contents.has(key) : false; - } - /** - * Checks if the document includes a value at `path`. - */ - hasIn(path) { - if (isEmptyPath(path)) - return this.contents !== undefined; - return isCollection(this.contents) ? this.contents.hasIn(path) : false; - } - /** - * Sets a value in this document. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - set(key, value) { - if (this.contents == null) { - // @ts-expect-error We can't really know that this matches Contents. - this.contents = collectionFromPath(this.schema, [key], value); - } - else if (assertCollection(this.contents)) { - this.contents.set(key, value); - } - } - /** - * Sets a value in this document. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - setIn(path, value) { - if (isEmptyPath(path)) { - // @ts-expect-error We can't really know that this matches Contents. - this.contents = value; - } - else if (this.contents == null) { - // @ts-expect-error We can't really know that this matches Contents. - this.contents = collectionFromPath(this.schema, Array.from(path), value); - } - else if (assertCollection(this.contents)) { - this.contents.setIn(path, value); - } - } - /** - * Change the YAML version and schema used by the document. - * A `null` version disables support for directives, explicit tags, anchors, and aliases. - * It also requires the `schema` option to be given as a `Schema` instance value. - * - * Overrides all previously set schema options. - */ - setSchema(version, options = {}) { - if (typeof version === 'number') - version = String(version); - let opt; - switch (version) { - case '1.1': - if (this.directives) - this.directives.yaml.version = '1.1'; - else - this.directives = new Directives({ version: '1.1' }); - opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' }; - break; - case '1.2': - case 'next': - if (this.directives) - this.directives.yaml.version = version; - else - this.directives = new Directives({ version }); - opt = { merge: false, resolveKnownTags: true, schema: 'core' }; - break; - case null: - if (this.directives) - delete this.directives; - opt = null; - break; - default: { - const sv = JSON.stringify(version); - throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`); - } - } - // Not using `instanceof Schema` to allow for duck typing - if (options.schema instanceof Object) - this.schema = options.schema; - else if (opt) - this.schema = new Schema(Object.assign(opt, options)); - else - throw new Error(`With a null YAML version, the { schema: Schema } option is required`); - } - // json & jsonArg are only used from toJSON() - toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) { - const ctx = { - anchors: new Map(), - doc: this, - keep: !json, - mapAsMap: mapAsMap === true, - mapKeyWarned: false, - maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100 - }; - const res = toJS(this.contents, jsonArg ?? '', ctx); - if (typeof onAnchor === 'function') - for (const { count, res } of ctx.anchors.values()) - onAnchor(res, count); - return typeof reviver === 'function' - ? 
applyReviver(reviver, { '': res }, '', res) - : res; - } - /** - * A JSON representation of the document `contents`. - * - * @param jsonArg Used by `JSON.stringify` to indicate the array index or - * property name. - */ - toJSON(jsonArg, onAnchor) { - return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor }); - } - /** A YAML representation of the document. */ - toString(options = {}) { - if (this.errors.length > 0) - throw new Error('Document with errors cannot be stringified'); - if ('indent' in options && - (!Number.isInteger(options.indent) || Number(options.indent) <= 0)) { - const s = JSON.stringify(options.indent); - throw new Error(`"indent" option must be a positive integer, not ${s}`); - } - return stringifyDocument(this, options); - } -} -function assertCollection(contents) { - if (isCollection(contents)) - return true; - throw new Error('Expected a YAML collection as document contents'); -} - -export { Document }; diff --git a/bin/node_modules/yaml/browser/dist/doc/anchors.js b/bin/node_modules/yaml/browser/dist/doc/anchors.js deleted file mode 100644 index 03c2442..0000000 --- a/bin/node_modules/yaml/browser/dist/doc/anchors.js +++ /dev/null @@ -1,72 +0,0 @@ -import { isScalar, isCollection } from '../nodes/identity.js'; -import { visit } from '../visit.js'; - -/** - * Verify that the input string is a valid anchor. - * - * Will throw on errors. - */ -function anchorIsValid(anchor) { - if (/[\x00-\x19\s,[\]{}]/.test(anchor)) { - const sa = JSON.stringify(anchor); - const msg = `Anchor must not contain whitespace or control characters: ${sa}`; - throw new Error(msg); - } - return true; -} -function anchorNames(root) { - const anchors = new Set(); - visit(root, { - Value(_key, node) { - if (node.anchor) - anchors.add(node.anchor); - } - }); - return anchors; -} -/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */ -function findNewAnchor(prefix, exclude) { - for (let i = 1; true; ++i) { - const name = `${prefix}${i}`; - if (!exclude.has(name)) - return name; - } -} -function createNodeAnchors(doc, prefix) { - const aliasObjects = []; - const sourceObjects = new Map(); - let prevAnchors = null; - return { - onAnchor: (source) => { - aliasObjects.push(source); - if (!prevAnchors) - prevAnchors = anchorNames(doc); - const anchor = findNewAnchor(prefix, prevAnchors); - prevAnchors.add(anchor); - return anchor; - }, - /** - * With circular references, the source node is only resolved after all - * of its child nodes are. This is why anchors are set only after all of - * the nodes have been created. 
- */ - setAnchors: () => { - for (const source of aliasObjects) { - const ref = sourceObjects.get(source); - if (typeof ref === 'object' && - ref.anchor && - (isScalar(ref.node) || isCollection(ref.node))) { - ref.node.anchor = ref.anchor; - } - else { - const error = new Error('Failed to resolve repeated object (this should not happen)'); - error.source = source; - throw error; - } - } - }, - sourceObjects - }; -} - -export { anchorIsValid, anchorNames, createNodeAnchors, findNewAnchor }; diff --git a/bin/node_modules/yaml/browser/dist/doc/applyReviver.js b/bin/node_modules/yaml/browser/dist/doc/applyReviver.js deleted file mode 100644 index 0e6a93c..0000000 --- a/bin/node_modules/yaml/browser/dist/doc/applyReviver.js +++ /dev/null @@ -1,54 +0,0 @@ -/** - * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec, - * in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the - * 2021 edition: https://tc39.es/ecma262/#sec-json.parse - * - * Includes extensions for handling Map and Set objects. - */ -function applyReviver(reviver, obj, key, val) { - if (val && typeof val === 'object') { - if (Array.isArray(val)) { - for (let i = 0, len = val.length; i < len; ++i) { - const v0 = val[i]; - const v1 = applyReviver(reviver, val, String(i), v0); - if (v1 === undefined) - delete val[i]; - else if (v1 !== v0) - val[i] = v1; - } - } - else if (val instanceof Map) { - for (const k of Array.from(val.keys())) { - const v0 = val.get(k); - const v1 = applyReviver(reviver, val, k, v0); - if (v1 === undefined) - val.delete(k); - else if (v1 !== v0) - val.set(k, v1); - } - } - else if (val instanceof Set) { - for (const v0 of Array.from(val)) { - const v1 = applyReviver(reviver, val, v0, v0); - if (v1 === undefined) - val.delete(v0); - else if (v1 !== v0) { - val.delete(v0); - val.add(v1); - } - } - } - else { - for (const [k, v0] of Object.entries(val)) { - const v1 = applyReviver(reviver, val, k, v0); - if (v1 === undefined) - delete val[k]; - else if (v1 !== v0) - val[k] = v1; - } - } - } - return reviver.call(obj, key, val); -} - -export { applyReviver }; diff --git a/bin/node_modules/yaml/browser/dist/doc/createNode.js b/bin/node_modules/yaml/browser/dist/doc/createNode.js deleted file mode 100644 index 1392269..0000000 --- a/bin/node_modules/yaml/browser/dist/doc/createNode.js +++ /dev/null @@ -1,89 +0,0 @@ -import { Alias } from '../nodes/Alias.js'; -import { isNode, isPair, MAP, SEQ, isDocument } from '../nodes/identity.js'; -import { Scalar } from '../nodes/Scalar.js'; - -const defaultTagPrefix = 'tag:yaml.org,2002:'; -function findTagObject(value, tagName, tags) { - if (tagName) { - const match = tags.filter(t => t.tag === tagName); - const tagObj = match.find(t => !t.format) ?? 
match[0]; - if (!tagObj) - throw new Error(`Tag ${tagName} not found`); - return tagObj; - } - return tags.find(t => t.identify?.(value) && !t.format); -} -function createNode(value, tagName, ctx) { - if (isDocument(value)) - value = value.contents; - if (isNode(value)) - return value; - if (isPair(value)) { - const map = ctx.schema[MAP].createNode?.(ctx.schema, null, ctx); - map.items.push(value); - return map; - } - if (value instanceof String || - value instanceof Number || - value instanceof Boolean || - (typeof BigInt !== 'undefined' && value instanceof BigInt) // not supported everywhere - ) { - // https://tc39.es/ecma262/#sec-serializejsonproperty - value = value.valueOf(); - } - const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx; - // Detect duplicate references to the same object & use Alias nodes for all - // after first. The `ref` wrapper allows for circular references to resolve. - let ref = undefined; - if (aliasDuplicateObjects && value && typeof value === 'object') { - ref = sourceObjects.get(value); - if (ref) { - if (!ref.anchor) - ref.anchor = onAnchor(value); - return new Alias(ref.anchor); - } - else { - ref = { anchor: null, node: null }; - sourceObjects.set(value, ref); - } - } - if (tagName?.startsWith('!!')) - tagName = defaultTagPrefix + tagName.slice(2); - let tagObj = findTagObject(value, tagName, schema.tags); - if (!tagObj) { - if (value && typeof value.toJSON === 'function') { - // eslint-disable-next-line @typescript-eslint/no-unsafe-call - value = value.toJSON(); - } - if (!value || typeof value !== 'object') { - const node = new Scalar(value); - if (ref) - ref.node = node; - return node; - } - tagObj = - value instanceof Map - ? schema[MAP] - : Symbol.iterator in Object(value) - ? schema[SEQ] - : schema[MAP]; - } - if (onTagObj) { - onTagObj(tagObj); - delete ctx.onTagObj; - } - const node = tagObj?.createNode - ? tagObj.createNode(ctx.schema, value, ctx) - : typeof tagObj?.nodeClass?.from === 'function' - ? tagObj.nodeClass.from(ctx.schema, value, ctx) - : new Scalar(value); - if (tagName) - node.tag = tagName; - else if (!tagObj.default) - node.tag = tagObj.tag; - if (ref) - ref.node = node; - return node; -} - -export { createNode }; diff --git a/bin/node_modules/yaml/browser/dist/doc/directives.js b/bin/node_modules/yaml/browser/dist/doc/directives.js deleted file mode 100644 index c66e612..0000000 --- a/bin/node_modules/yaml/browser/dist/doc/directives.js +++ /dev/null @@ -1,176 +0,0 @@ -import { isNode } from '../nodes/identity.js'; -import { visit } from '../visit.js'; - -const escapeChars = { - '!': '%21', - ',': '%2C', - '[': '%5B', - ']': '%5D', - '{': '%7B', - '}': '%7D' -}; -const escapeTagName = (tn) => tn.replace(/[!,[\]{}]/g, ch => escapeChars[ch]); -class Directives { - constructor(yaml, tags) { - /** - * The directives-end/doc-start marker `---`. If `null`, a marker may still be - * included in the document's stringified representation. - */ - this.docStart = null; - /** The doc-end marker `...`. */ - this.docEnd = false; - this.yaml = Object.assign({}, Directives.defaultYaml, yaml); - this.tags = Object.assign({}, Directives.defaultTags, tags); - } - clone() { - const copy = new Directives(this.yaml, this.tags); - copy.docStart = this.docStart; - return copy; - } - /** - * During parsing, get a Directives instance for the current document and - * update the stream state according to the current version's spec. 
- */ - atDocument() { - const res = new Directives(this.yaml, this.tags); - switch (this.yaml.version) { - case '1.1': - this.atNextDocument = true; - break; - case '1.2': - this.atNextDocument = false; - this.yaml = { - explicit: Directives.defaultYaml.explicit, - version: '1.2' - }; - this.tags = Object.assign({}, Directives.defaultTags); - break; - } - return res; - } - /** - * @param onError - May be called even if the action was successful - * @returns `true` on success - */ - add(line, onError) { - if (this.atNextDocument) { - this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' }; - this.tags = Object.assign({}, Directives.defaultTags); - this.atNextDocument = false; - } - const parts = line.trim().split(/[ \t]+/); - const name = parts.shift(); - switch (name) { - case '%TAG': { - if (parts.length !== 2) { - onError(0, '%TAG directive should contain exactly two parts'); - if (parts.length < 2) - return false; - } - const [handle, prefix] = parts; - this.tags[handle] = prefix; - return true; - } - case '%YAML': { - this.yaml.explicit = true; - if (parts.length !== 1) { - onError(0, '%YAML directive should contain exactly one part'); - return false; - } - const [version] = parts; - if (version === '1.1' || version === '1.2') { - this.yaml.version = version; - return true; - } - else { - const isValid = /^\d+\.\d+$/.test(version); - onError(6, `Unsupported YAML version ${version}`, isValid); - return false; - } - } - default: - onError(0, `Unknown directive ${name}`, true); - return false; - } - } - /** - * Resolves a tag, matching handles to those defined in %TAG directives. - * - * @returns Resolved tag, which may also be the non-specific tag `'!'` or a - * `'!local'` tag, or `null` if unresolvable. - */ - tagName(source, onError) { - if (source === '!') - return '!'; // non-specific tag - if (source[0] !== '!') { - onError(`Not a valid tag: ${source}`); - return null; - } - if (source[1] === '<') { - const verbatim = source.slice(2, -1); - if (verbatim === '!' || verbatim === '!!') { - onError(`Verbatim tags aren't resolved, so ${source} is invalid.`); - return null; - } - if (source[source.length - 1] !== '>') - onError('Verbatim tags must end with a >'); - return verbatim; - } - const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/s); - if (!suffix) - onError(`The ${source} tag has no suffix`); - const prefix = this.tags[handle]; - if (prefix) { - try { - return prefix + decodeURIComponent(suffix); - } - catch (error) { - onError(String(error)); - return null; - } - } - if (handle === '!') - return source; // local tag - onError(`Could not resolve tag: ${source}`); - return null; - } - /** - * Given a fully resolved tag, returns its printable string form, - * taking into account current tag prefixes and defaults. - */ - tagString(tag) { - for (const [handle, prefix] of Object.entries(this.tags)) { - if (tag.startsWith(prefix)) - return handle + escapeTagName(tag.substring(prefix.length)); - } - return tag[0] === '!' ? tag : `!<${tag}>`; - } - toString(doc) { - const lines = this.yaml.explicit - ? [`%YAML ${this.yaml.version || '1.2'}`] - : []; - const tagEntries = Object.entries(this.tags); - let tagNames; - if (doc && tagEntries.length > 0 && isNode(doc.contents)) { - const tags = {}; - visit(doc.contents, (_key, node) => { - if (isNode(node) && node.tag) - tags[node.tag] = true; - }); - tagNames = Object.keys(tags); - } - else - tagNames = []; - for (const [handle, prefix] of tagEntries) { - if (handle === '!!' 
&& prefix === 'tag:yaml.org,2002:') - continue; - if (!doc || tagNames.some(tn => tn.startsWith(prefix))) - lines.push(`%TAG ${handle} ${prefix}`); - } - return lines.join('\n'); - } -} -Directives.defaultYaml = { explicit: false, version: '1.2' }; -Directives.defaultTags = { '!!': 'tag:yaml.org,2002:' }; - -export { Directives }; diff --git a/bin/node_modules/yaml/browser/dist/errors.js b/bin/node_modules/yaml/browser/dist/errors.js deleted file mode 100644 index ad91290..0000000 --- a/bin/node_modules/yaml/browser/dist/errors.js +++ /dev/null @@ -1,57 +0,0 @@ -class YAMLError extends Error { - constructor(name, pos, code, message) { - super(); - this.name = name; - this.code = code; - this.message = message; - this.pos = pos; - } -} -class YAMLParseError extends YAMLError { - constructor(pos, code, message) { - super('YAMLParseError', pos, code, message); - } -} -class YAMLWarning extends YAMLError { - constructor(pos, code, message) { - super('YAMLWarning', pos, code, message); - } -} -const prettifyError = (src, lc) => (error) => { - if (error.pos[0] === -1) - return; - error.linePos = error.pos.map(pos => lc.linePos(pos)); - const { line, col } = error.linePos[0]; - error.message += ` at line ${line}, column ${col}`; - let ci = col - 1; - let lineStr = src - .substring(lc.lineStarts[line - 1], lc.lineStarts[line]) - .replace(/[\n\r]+$/, ''); - // Trim to max 80 chars, keeping col position near the middle - if (ci >= 60 && lineStr.length > 80) { - const trimStart = Math.min(ci - 39, lineStr.length - 79); - lineStr = '…' + lineStr.substring(trimStart); - ci -= trimStart - 1; - } - if (lineStr.length > 80) - lineStr = lineStr.substring(0, 79) + '…'; - // Include previous line in context if pointing at line start - if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) { - // Regexp won't match if start is trimmed - let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]); - if (prev.length > 80) - prev = prev.substring(0, 79) + '…\n'; - lineStr = prev + lineStr; - } - if (/[^ ]/.test(lineStr)) { - let count = 1; - const end = error.linePos[1]; - if (end && end.line === line && end.col > col) { - count = Math.max(1, Math.min(end.col - col, 80 - ci)); - } - const pointer = ' '.repeat(ci) + '^'.repeat(count); - error.message += `:\n\n${lineStr}\n${pointer}\n`; - } -}; - -export { YAMLError, YAMLParseError, YAMLWarning, prettifyError }; diff --git a/bin/node_modules/yaml/browser/dist/index.js b/bin/node_modules/yaml/browser/dist/index.js deleted file mode 100644 index 097bf24..0000000 --- a/bin/node_modules/yaml/browser/dist/index.js +++ /dev/null @@ -1,17 +0,0 @@ -export { Composer } from './compose/composer.js'; -export { Document } from './doc/Document.js'; -export { Schema } from './schema/Schema.js'; -export { YAMLError, YAMLParseError, YAMLWarning } from './errors.js'; -export { Alias } from './nodes/Alias.js'; -export { isAlias, isCollection, isDocument, isMap, isNode, isPair, isScalar, isSeq } from './nodes/identity.js'; -export { Pair } from './nodes/Pair.js'; -export { Scalar } from './nodes/Scalar.js'; -export { YAMLMap } from './nodes/YAMLMap.js'; -export { YAMLSeq } from './nodes/YAMLSeq.js'; -import * as cst from './parse/cst.js'; -export { cst as CST }; -export { Lexer } from './parse/lexer.js'; -export { LineCounter } from './parse/line-counter.js'; -export { Parser } from './parse/parser.js'; -export { parse, parseAllDocuments, parseDocument, stringify } from './public-api.js'; -export { visit, visitAsync } from './visit.js'; diff --git 
a/bin/node_modules/yaml/browser/dist/log.js b/bin/node_modules/yaml/browser/dist/log.js deleted file mode 100644 index c5f8998..0000000 --- a/bin/node_modules/yaml/browser/dist/log.js +++ /dev/null @@ -1,16 +0,0 @@ -function debug(logLevel, ...messages) { - if (logLevel === 'debug') - console.log(...messages); -} -function warn(logLevel, warning) { - if (logLevel === 'debug' || logLevel === 'warn') { - // https://github.com/typescript-eslint/typescript-eslint/issues/7478 - // eslint-disable-next-line @typescript-eslint/prefer-optional-chain - if (typeof process !== 'undefined' && process.emitWarning) - process.emitWarning(warning); - else - console.warn(warning); - } -} - -export { debug, warn }; diff --git a/bin/node_modules/yaml/browser/dist/node_modules/tslib/tslib.es6.js b/bin/node_modules/yaml/browser/dist/node_modules/tslib/tslib.es6.js deleted file mode 100644 index 9c794c5..0000000 --- a/bin/node_modules/yaml/browser/dist/node_modules/tslib/tslib.es6.js +++ /dev/null @@ -1,21 +0,0 @@ -/****************************************************************************** -Copyright (c) Microsoft Corporation. - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR -OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. -***************************************************************************** */ -function __classPrivateFieldGet(receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); -} - -export { __classPrivateFieldGet }; diff --git a/bin/node_modules/yaml/browser/dist/nodes/Alias.js b/bin/node_modules/yaml/browser/dist/nodes/Alias.js deleted file mode 100644 index 8e34f09..0000000 --- a/bin/node_modules/yaml/browser/dist/nodes/Alias.js +++ /dev/null @@ -1,101 +0,0 @@ -import { anchorIsValid } from '../doc/anchors.js'; -import { visit } from '../visit.js'; -import { ALIAS, isAlias, isCollection, isPair } from './identity.js'; -import { NodeBase } from './Node.js'; -import { toJS } from './toJS.js'; - -class Alias extends NodeBase { - constructor(source) { - super(ALIAS); - this.source = source; - Object.defineProperty(this, 'tag', { - set() { - throw new Error('Alias nodes cannot have tags'); - } - }); - } - /** - * Resolve the value of this alias within `doc`, finding the last - * instance of the `source` anchor before this node. 
- */ - resolve(doc) { - let found = undefined; - visit(doc, { - Node: (_key, node) => { - if (node === this) - return visit.BREAK; - if (node.anchor === this.source) - found = node; - } - }); - return found; - } - toJSON(_arg, ctx) { - if (!ctx) - return { source: this.source }; - const { anchors, doc, maxAliasCount } = ctx; - const source = this.resolve(doc); - if (!source) { - const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`; - throw new ReferenceError(msg); - } - let data = anchors.get(source); - if (!data) { - // Resolve anchors for Node.prototype.toJS() - toJS(source, null, ctx); - data = anchors.get(source); - } - /* istanbul ignore if */ - if (!data || data.res === undefined) { - const msg = 'This should not happen: Alias anchor was not resolved?'; - throw new ReferenceError(msg); - } - if (maxAliasCount >= 0) { - data.count += 1; - if (data.aliasCount === 0) - data.aliasCount = getAliasCount(doc, source, anchors); - if (data.count * data.aliasCount > maxAliasCount) { - const msg = 'Excessive alias count indicates a resource exhaustion attack'; - throw new ReferenceError(msg); - } - } - return data.res; - } - toString(ctx, _onComment, _onChompKeep) { - const src = `*${this.source}`; - if (ctx) { - anchorIsValid(this.source); - if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) { - const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`; - throw new Error(msg); - } - if (ctx.implicitKey) - return `${src} `; - } - return src; - } -} -function getAliasCount(doc, node, anchors) { - if (isAlias(node)) { - const source = node.resolve(doc); - const anchor = anchors && source && anchors.get(source); - return anchor ? anchor.count * anchor.aliasCount : 0; - } - else if (isCollection(node)) { - let count = 0; - for (const item of node.items) { - const c = getAliasCount(doc, item, anchors); - if (c > count) - count = c; - } - return count; - } - else if (isPair(node)) { - const kc = getAliasCount(doc, node.key, anchors); - const vc = getAliasCount(doc, node.value, anchors); - return Math.max(kc, vc); - } - return 1; -} - -export { Alias }; diff --git a/bin/node_modules/yaml/browser/dist/nodes/Collection.js b/bin/node_modules/yaml/browser/dist/nodes/Collection.js deleted file mode 100644 index a5f89be..0000000 --- a/bin/node_modules/yaml/browser/dist/nodes/Collection.js +++ /dev/null @@ -1,148 +0,0 @@ -import { createNode } from '../doc/createNode.js'; -import { isNode, isPair, isCollection, isScalar } from './identity.js'; -import { NodeBase } from './Node.js'; - -function collectionFromPath(schema, path, value) { - let v = value; - for (let i = path.length - 1; i >= 0; --i) { - const k = path[i]; - if (typeof k === 'number' && Number.isInteger(k) && k >= 0) { - const a = []; - a[k] = v; - v = a; - } - else { - v = new Map([[k, v]]); - } - } - return createNode(v, undefined, { - aliasDuplicateObjects: false, - keepUndefined: false, - onAnchor: () => { - throw new Error('This should not happen, please report a bug.'); - }, - schema, - sourceObjects: new Map() - }); -} -// Type guard is intentionally a little wrong so as to be more useful, -// as it does not cover untypable empty non-string iterables (e.g. []). 
-const isEmptyPath = (path) => path == null || - (typeof path === 'object' && !!path[Symbol.iterator]().next().done); -class Collection extends NodeBase { - constructor(type, schema) { - super(type); - Object.defineProperty(this, 'schema', { - value: schema, - configurable: true, - enumerable: false, - writable: true - }); - } - /** - * Create a copy of this collection. - * - * @param schema - If defined, overwrites the original's schema - */ - clone(schema) { - const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this)); - if (schema) - copy.schema = schema; - copy.items = copy.items.map(it => isNode(it) || isPair(it) ? it.clone(schema) : it); - if (this.range) - copy.range = this.range.slice(); - return copy; - } - /** - * Adds a value to the collection. For `!!map` and `!!omap` the value must - * be a Pair instance or a `{ key, value }` object, which may not have a key - * that already exists in the map. - */ - addIn(path, value) { - if (isEmptyPath(path)) - this.add(value); - else { - const [key, ...rest] = path; - const node = this.get(key, true); - if (isCollection(node)) - node.addIn(rest, value); - else if (node === undefined && this.schema) - this.set(key, collectionFromPath(this.schema, rest, value)); - else - throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); - } - } - /** - * Removes a value from the collection. - * @returns `true` if the item was found and removed. - */ - deleteIn(path) { - const [key, ...rest] = path; - if (rest.length === 0) - return this.delete(key); - const node = this.get(key, true); - if (isCollection(node)) - return node.deleteIn(rest); - else - throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); - } - /** - * Returns item at `key`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - getIn(path, keepScalar) { - const [key, ...rest] = path; - const node = this.get(key, true); - if (rest.length === 0) - return !keepScalar && isScalar(node) ? node.value : node; - else - return isCollection(node) ? node.getIn(rest, keepScalar) : undefined; - } - hasAllNullValues(allowScalar) { - return this.items.every(node => { - if (!isPair(node)) - return false; - const n = node.value; - return (n == null || - (allowScalar && - isScalar(n) && - n.value == null && - !n.commentBefore && - !n.comment && - !n.tag)); - }); - } - /** - * Checks if the collection includes a value with the key `key`. - */ - hasIn(path) { - const [key, ...rest] = path; - if (rest.length === 0) - return this.has(key); - const node = this.get(key, true); - return isCollection(node) ? node.hasIn(rest) : false; - } - /** - * Sets a value in this collection. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - setIn(path, value) { - const [key, ...rest] = path; - if (rest.length === 0) { - this.set(key, value); - } - else { - const node = this.get(key, true); - if (isCollection(node)) - node.setIn(rest, value); - else if (node === undefined && this.schema) - this.set(key, collectionFromPath(this.schema, rest, value)); - else - throw new Error(`Expected YAML collection at ${key}. 
Remaining path: ${rest}`); - } - } -} -Collection.maxFlowStringSingleLineLength = 60; - -export { Collection, collectionFromPath, isEmptyPath }; diff --git a/bin/node_modules/yaml/browser/dist/nodes/Node.js b/bin/node_modules/yaml/browser/dist/nodes/Node.js deleted file mode 100644 index b0eb96b..0000000 --- a/bin/node_modules/yaml/browser/dist/nodes/Node.js +++ /dev/null @@ -1,38 +0,0 @@ -import { applyReviver } from '../doc/applyReviver.js'; -import { NODE_TYPE, isDocument } from './identity.js'; -import { toJS } from './toJS.js'; - -class NodeBase { - constructor(type) { - Object.defineProperty(this, NODE_TYPE, { value: type }); - } - /** Create a copy of this node. */ - clone() { - const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this)); - if (this.range) - copy.range = this.range.slice(); - return copy; - } - /** A plain JavaScript representation of this node. */ - toJS(doc, { mapAsMap, maxAliasCount, onAnchor, reviver } = {}) { - if (!isDocument(doc)) - throw new TypeError('A document argument is required'); - const ctx = { - anchors: new Map(), - doc, - keep: true, - mapAsMap: mapAsMap === true, - mapKeyWarned: false, - maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100 - }; - const res = toJS(this, '', ctx); - if (typeof onAnchor === 'function') - for (const { count, res } of ctx.anchors.values()) - onAnchor(res, count); - return typeof reviver === 'function' - ? applyReviver(reviver, { '': res }, '', res) - : res; - } -} - -export { NodeBase }; diff --git a/bin/node_modules/yaml/browser/dist/nodes/Pair.js b/bin/node_modules/yaml/browser/dist/nodes/Pair.js deleted file mode 100644 index 6e419f6..0000000 --- a/bin/node_modules/yaml/browser/dist/nodes/Pair.js +++ /dev/null @@ -1,36 +0,0 @@ -import { createNode } from '../doc/createNode.js'; -import { stringifyPair } from '../stringify/stringifyPair.js'; -import { addPairToJSMap } from './addPairToJSMap.js'; -import { NODE_TYPE, PAIR, isNode } from './identity.js'; - -function createPair(key, value, ctx) { - const k = createNode(key, undefined, ctx); - const v = createNode(value, undefined, ctx); - return new Pair(k, v); -} -class Pair { - constructor(key, value = null) { - Object.defineProperty(this, NODE_TYPE, { value: PAIR }); - this.key = key; - this.value = value; - } - clone(schema) { - let { key, value } = this; - if (isNode(key)) - key = key.clone(schema); - if (isNode(value)) - value = value.clone(schema); - return new Pair(key, value); - } - toJSON(_, ctx) { - const pair = ctx?.mapAsMap ? new Map() : {}; - return addPairToJSMap(ctx, pair, this); - } - toString(ctx, onComment, onChompKeep) { - return ctx?.doc - ? stringifyPair(this, ctx, onComment, onChompKeep) - : JSON.stringify(this); - } -} - -export { Pair, createPair }; diff --git a/bin/node_modules/yaml/browser/dist/nodes/Scalar.js b/bin/node_modules/yaml/browser/dist/nodes/Scalar.js deleted file mode 100644 index a9f2673..0000000 --- a/bin/node_modules/yaml/browser/dist/nodes/Scalar.js +++ /dev/null @@ -1,24 +0,0 @@ -import { SCALAR } from './identity.js'; -import { NodeBase } from './Node.js'; -import { toJS } from './toJS.js'; - -const isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object'); -class Scalar extends NodeBase { - constructor(value) { - super(SCALAR); - this.value = value; - } - toJSON(arg, ctx) { - return ctx?.keep ? 
this.value : toJS(this.value, arg, ctx); - } - toString() { - return String(this.value); - } -} -Scalar.BLOCK_FOLDED = 'BLOCK_FOLDED'; -Scalar.BLOCK_LITERAL = 'BLOCK_LITERAL'; -Scalar.PLAIN = 'PLAIN'; -Scalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE'; -Scalar.QUOTE_SINGLE = 'QUOTE_SINGLE'; - -export { Scalar, isScalarValue }; diff --git a/bin/node_modules/yaml/browser/dist/nodes/YAMLMap.js b/bin/node_modules/yaml/browser/dist/nodes/YAMLMap.js deleted file mode 100644 index 5d88737..0000000 --- a/bin/node_modules/yaml/browser/dist/nodes/YAMLMap.js +++ /dev/null @@ -1,144 +0,0 @@ -import { stringifyCollection } from '../stringify/stringifyCollection.js'; -import { addPairToJSMap } from './addPairToJSMap.js'; -import { Collection } from './Collection.js'; -import { isPair, isScalar, MAP } from './identity.js'; -import { Pair, createPair } from './Pair.js'; -import { isScalarValue } from './Scalar.js'; - -function findPair(items, key) { - const k = isScalar(key) ? key.value : key; - for (const it of items) { - if (isPair(it)) { - if (it.key === key || it.key === k) - return it; - if (isScalar(it.key) && it.key.value === k) - return it; - } - } - return undefined; -} -class YAMLMap extends Collection { - static get tagName() { - return 'tag:yaml.org,2002:map'; - } - constructor(schema) { - super(MAP, schema); - this.items = []; - } - /** - * A generic collection parsing method that can be extended - * to other node classes that inherit from YAMLMap - */ - static from(schema, obj, ctx) { - const { keepUndefined, replacer } = ctx; - const map = new this(schema); - const add = (key, value) => { - if (typeof replacer === 'function') - value = replacer.call(obj, key, value); - else if (Array.isArray(replacer) && !replacer.includes(key)) - return; - if (value !== undefined || keepUndefined) - map.items.push(createPair(key, value, ctx)); - }; - if (obj instanceof Map) { - for (const [key, value] of obj) - add(key, value); - } - else if (obj && typeof obj === 'object') { - for (const key of Object.keys(obj)) - add(key, obj[key]); - } - if (typeof schema.sortMapEntries === 'function') { - map.items.sort(schema.sortMapEntries); - } - return map; - } - /** - * Adds a value to the collection. - * - * @param overwrite - If not set `true`, using a key that is already in the - * collection will throw. Otherwise, overwrites the previous value. - */ - add(pair, overwrite) { - let _pair; - if (isPair(pair)) - _pair = pair; - else if (!pair || typeof pair !== 'object' || !('key' in pair)) { - // In TypeScript, this never happens. - _pair = new Pair(pair, pair?.value); - } - else - _pair = new Pair(pair.key, pair.value); - const prev = findPair(this.items, _pair.key); - const sortEntries = this.schema?.sortMapEntries; - if (prev) { - if (!overwrite) - throw new Error(`Key ${_pair.key} already set`); - // For scalars, keep the old node & its comments and anchors - if (isScalar(prev.value) && isScalarValue(_pair.value)) - prev.value.value = _pair.value; - else - prev.value = _pair.value; - } - else if (sortEntries) { - const i = this.items.findIndex(item => sortEntries(_pair, item) < 0); - if (i === -1) - this.items.push(_pair); - else - this.items.splice(i, 0, _pair); - } - else { - this.items.push(_pair); - } - } - delete(key) { - const it = findPair(this.items, key); - if (!it) - return false; - const del = this.items.splice(this.items.indexOf(it), 1); - return del.length > 0; - } - get(key, keepScalar) { - const it = findPair(this.items, key); - const node = it?.value; - return (!keepScalar && isScalar(node) ? 
node.value : node) ?? undefined; - } - has(key) { - return !!findPair(this.items, key); - } - set(key, value) { - this.add(new Pair(key, value), true); - } - /** - * @param ctx - Conversion context, originally set in Document#toJS() - * @param {Class} Type - If set, forces the returned collection type - * @returns Instance of Type, Map, or Object - */ - toJSON(_, ctx, Type) { - const map = Type ? new Type() : ctx?.mapAsMap ? new Map() : {}; - if (ctx?.onCreate) - ctx.onCreate(map); - for (const item of this.items) - addPairToJSMap(ctx, map, item); - return map; - } - toString(ctx, onComment, onChompKeep) { - if (!ctx) - return JSON.stringify(this); - for (const item of this.items) { - if (!isPair(item)) - throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`); - } - if (!ctx.allNullValues && this.hasAllNullValues(false)) - ctx = Object.assign({}, ctx, { allNullValues: true }); - return stringifyCollection(this, ctx, { - blockItemPrefix: '', - flowChars: { start: '{', end: '}' }, - itemIndent: ctx.indent || '', - onChompKeep, - onComment - }); - } -} - -export { YAMLMap, findPair }; diff --git a/bin/node_modules/yaml/browser/dist/nodes/YAMLSeq.js b/bin/node_modules/yaml/browser/dist/nodes/YAMLSeq.js deleted file mode 100644 index b80de40..0000000 --- a/bin/node_modules/yaml/browser/dist/nodes/YAMLSeq.js +++ /dev/null @@ -1,113 +0,0 @@ -import { createNode } from '../doc/createNode.js'; -import { stringifyCollection } from '../stringify/stringifyCollection.js'; -import { Collection } from './Collection.js'; -import { SEQ, isScalar } from './identity.js'; -import { isScalarValue } from './Scalar.js'; -import { toJS } from './toJS.js'; - -class YAMLSeq extends Collection { - static get tagName() { - return 'tag:yaml.org,2002:seq'; - } - constructor(schema) { - super(SEQ, schema); - this.items = []; - } - add(value) { - this.items.push(value); - } - /** - * Removes a value from the collection. - * - * `key` must contain a representation of an integer for this to succeed. - * It may be wrapped in a `Scalar`. - * - * @returns `true` if the item was found and removed. - */ - delete(key) { - const idx = asItemIndex(key); - if (typeof idx !== 'number') - return false; - const del = this.items.splice(idx, 1); - return del.length > 0; - } - get(key, keepScalar) { - const idx = asItemIndex(key); - if (typeof idx !== 'number') - return undefined; - const it = this.items[idx]; - return !keepScalar && isScalar(it) ? it.value : it; - } - /** - * Checks if the collection includes a value with the key `key`. - * - * `key` must contain a representation of an integer for this to succeed. - * It may be wrapped in a `Scalar`. - */ - has(key) { - const idx = asItemIndex(key); - return typeof idx === 'number' && idx < this.items.length; - } - /** - * Sets a value in this collection. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - * - * If `key` does not contain a representation of an integer, this will throw. - * It may be wrapped in a `Scalar`. 
- */ - set(key, value) { - const idx = asItemIndex(key); - if (typeof idx !== 'number') - throw new Error(`Expected a valid index, not ${key}.`); - const prev = this.items[idx]; - if (isScalar(prev) && isScalarValue(value)) - prev.value = value; - else - this.items[idx] = value; - } - toJSON(_, ctx) { - const seq = []; - if (ctx?.onCreate) - ctx.onCreate(seq); - let i = 0; - for (const item of this.items) - seq.push(toJS(item, String(i++), ctx)); - return seq; - } - toString(ctx, onComment, onChompKeep) { - if (!ctx) - return JSON.stringify(this); - return stringifyCollection(this, ctx, { - blockItemPrefix: '- ', - flowChars: { start: '[', end: ']' }, - itemIndent: (ctx.indent || '') + ' ', - onChompKeep, - onComment - }); - } - static from(schema, obj, ctx) { - const { replacer } = ctx; - const seq = new this(schema); - if (obj && Symbol.iterator in Object(obj)) { - let i = 0; - for (let it of obj) { - if (typeof replacer === 'function') { - const key = obj instanceof Set ? it : String(i++); - it = replacer.call(obj, key, it); - } - seq.items.push(createNode(it, undefined, ctx)); - } - } - return seq; - } -} -function asItemIndex(key) { - let idx = isScalar(key) ? key.value : key; - if (idx && typeof idx === 'string') - idx = Number(idx); - return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0 - ? idx - : null; -} - -export { YAMLSeq }; diff --git a/bin/node_modules/yaml/browser/dist/nodes/addPairToJSMap.js b/bin/node_modules/yaml/browser/dist/nodes/addPairToJSMap.js deleted file mode 100644 index 680dcea..0000000 --- a/bin/node_modules/yaml/browser/dist/nodes/addPairToJSMap.js +++ /dev/null @@ -1,104 +0,0 @@ -import { warn } from '../log.js'; -import { createStringifyContext } from '../stringify/stringify.js'; -import { isAlias, isSeq, isScalar, isMap, isNode } from './identity.js'; -import { Scalar } from './Scalar.js'; -import { toJS } from './toJS.js'; - -const MERGE_KEY = '<<'; -function addPairToJSMap(ctx, map, { key, value }) { - if (ctx?.doc.schema.merge && isMergeKey(key)) { - value = isAlias(value) ? value.resolve(ctx.doc) : value; - if (isSeq(value)) - for (const it of value.items) - mergeToJSMap(ctx, map, it); - else if (Array.isArray(value)) - for (const it of value) - mergeToJSMap(ctx, map, it); - else - mergeToJSMap(ctx, map, value); - } - else { - const jsKey = toJS(key, '', ctx); - if (map instanceof Map) { - map.set(jsKey, toJS(value, jsKey, ctx)); - } - else if (map instanceof Set) { - map.add(jsKey); - } - else { - const stringKey = stringifyKey(key, jsKey, ctx); - const jsValue = toJS(value, stringKey, ctx); - if (stringKey in map) - Object.defineProperty(map, stringKey, { - value: jsValue, - writable: true, - enumerable: true, - configurable: true - }); - else - map[stringKey] = jsValue; - } - } - return map; -} -const isMergeKey = (key) => key === MERGE_KEY || - (isScalar(key) && - key.value === MERGE_KEY && - (!key.type || key.type === Scalar.PLAIN)); -// If the value associated with a merge key is a single mapping node, each of -// its key/value pairs is inserted into the current mapping, unless the key -// already exists in it. If the value associated with the merge key is a -// sequence, then this sequence is expected to contain mapping nodes and each -// of these nodes is merged in turn according to its order in the sequence. -// Keys in mapping nodes earlier in the sequence override keys specified in -// later mapping nodes. -- http://yaml.org/type/merge.html -function mergeToJSMap(ctx, map, value) { - const source = ctx && isAlias(value) ? 
value.resolve(ctx.doc) : value; - if (!isMap(source)) - throw new Error('Merge sources must be maps or map aliases'); - const srcMap = source.toJSON(null, ctx, Map); - for (const [key, value] of srcMap) { - if (map instanceof Map) { - if (!map.has(key)) - map.set(key, value); - } - else if (map instanceof Set) { - map.add(key); - } - else if (!Object.prototype.hasOwnProperty.call(map, key)) { - Object.defineProperty(map, key, { - value, - writable: true, - enumerable: true, - configurable: true - }); - } - } - return map; -} -function stringifyKey(key, jsKey, ctx) { - if (jsKey === null) - return ''; - if (typeof jsKey !== 'object') - return String(jsKey); - if (isNode(key) && ctx?.doc) { - const strCtx = createStringifyContext(ctx.doc, {}); - strCtx.anchors = new Set(); - for (const node of ctx.anchors.keys()) - strCtx.anchors.add(node.anchor); - strCtx.inFlow = true; - strCtx.inStringifyKey = true; - const strKey = key.toString(strCtx); - if (!ctx.mapKeyWarned) { - let jsonStr = JSON.stringify(strKey); - if (jsonStr.length > 40) - jsonStr = jsonStr.substring(0, 36) + '..."'; - warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. Set mapAsMap: true to use object keys.`); - ctx.mapKeyWarned = true; - } - return strKey; - } - return JSON.stringify(jsKey); -} - -export { addPairToJSMap }; diff --git a/bin/node_modules/yaml/browser/dist/nodes/identity.js b/bin/node_modules/yaml/browser/dist/nodes/identity.js deleted file mode 100644 index 7b79920..0000000 --- a/bin/node_modules/yaml/browser/dist/nodes/identity.js +++ /dev/null @@ -1,36 +0,0 @@ -const ALIAS = Symbol.for('yaml.alias'); -const DOC = Symbol.for('yaml.document'); -const MAP = Symbol.for('yaml.map'); -const PAIR = Symbol.for('yaml.pair'); -const SCALAR = Symbol.for('yaml.scalar'); -const SEQ = Symbol.for('yaml.seq'); -const NODE_TYPE = Symbol.for('yaml.node.type'); -const isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS; -const isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC; -const isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP; -const isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR; -const isScalar = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR; -const isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ; -function isCollection(node) { - if (node && typeof node === 'object') - switch (node[NODE_TYPE]) { - case MAP: - case SEQ: - return true; - } - return false; -} -function isNode(node) { - if (node && typeof node === 'object') - switch (node[NODE_TYPE]) { - case ALIAS: - case MAP: - case SCALAR: - case SEQ: - return true; - } - return false; -} -const hasAnchor = (node) => (isScalar(node) || isCollection(node)) && !!node.anchor; - -export { ALIAS, DOC, MAP, NODE_TYPE, PAIR, SCALAR, SEQ, hasAnchor, isAlias, isCollection, isDocument, isMap, isNode, isPair, isScalar, isSeq }; diff --git a/bin/node_modules/yaml/browser/dist/nodes/toJS.js b/bin/node_modules/yaml/browser/dist/nodes/toJS.js deleted file mode 100644 index 0ca6250..0000000 --- a/bin/node_modules/yaml/browser/dist/nodes/toJS.js +++ /dev/null @@ -1,37 +0,0 @@ -import { hasAnchor } from './identity.js'; - -/** - * Recursively convert any node or its contents to native JavaScript - * - * @param value - The input value - * @param arg - If `value` defines a `toJSON()` method, use this - * as its first argument - * 
@param ctx - Conversion context, originally set in Document#toJS(). If - * `{ keep: true }` is not set, output should be suitable for JSON - * stringification. - */ -function toJS(value, arg, ctx) { - // eslint-disable-next-line @typescript-eslint/no-unsafe-return - if (Array.isArray(value)) - return value.map((v, i) => toJS(v, String(i), ctx)); - if (value && typeof value.toJSON === 'function') { - // eslint-disable-next-line @typescript-eslint/no-unsafe-call - if (!ctx || !hasAnchor(value)) - return value.toJSON(arg, ctx); - const data = { aliasCount: 0, count: 1, res: undefined }; - ctx.anchors.set(value, data); - ctx.onCreate = res => { - data.res = res; - delete ctx.onCreate; - }; - const res = value.toJSON(arg, ctx); - if (ctx.onCreate) - ctx.onCreate(res); - return res; - } - if (typeof value === 'bigint' && !ctx?.keep) - return Number(value); - return value; -} - -export { toJS }; diff --git a/bin/node_modules/yaml/browser/dist/parse/cst-scalar.js b/bin/node_modules/yaml/browser/dist/parse/cst-scalar.js deleted file mode 100644 index d4def99..0000000 --- a/bin/node_modules/yaml/browser/dist/parse/cst-scalar.js +++ /dev/null @@ -1,214 +0,0 @@ -import { resolveBlockScalar } from '../compose/resolve-block-scalar.js'; -import { resolveFlowScalar } from '../compose/resolve-flow-scalar.js'; -import { YAMLParseError } from '../errors.js'; -import { stringifyString } from '../stringify/stringifyString.js'; - -function resolveAsScalar(token, strict = true, onError) { - if (token) { - const _onError = (pos, code, message) => { - const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? pos[0] : pos.offset; - if (onError) - onError(offset, code, message); - else - throw new YAMLParseError([offset, offset + 1], code, message); - }; - switch (token.type) { - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - return resolveFlowScalar(token, strict, _onError); - case 'block-scalar': - return resolveBlockScalar(token, strict, _onError); - } - } - return null; -} -/** - * Create a new scalar token with `value` - * - * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, - * as this function does not support any schema operations and won't check for such conflicts. - * - * @param value The string representation of the value, which will have its content properly indented. - * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added. - * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. - * @param context.indent The indent level of the token. - * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value. - * @param context.offset The offset position of the token. - * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. - */ -function createScalarToken(value, context) { - const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context; - const source = stringifyString({ type, value }, { - implicitKey, - indent: indent > 0 ? ' '.repeat(indent) : '', - inFlow, - options: { blockQuote: true, lineWidth: -1 } - }); - const end = context.end ?? 
[ - { type: 'newline', offset: -1, indent, source: '\n' } - ]; - switch (source[0]) { - case '|': - case '>': { - const he = source.indexOf('\n'); - const head = source.substring(0, he); - const body = source.substring(he + 1) + '\n'; - const props = [ - { type: 'block-scalar-header', offset, indent, source: head } - ]; - if (!addEndtoBlockProps(props, end)) - props.push({ type: 'newline', offset: -1, indent, source: '\n' }); - return { type: 'block-scalar', offset, indent, props, source: body }; - } - case '"': - return { type: 'double-quoted-scalar', offset, indent, source, end }; - case "'": - return { type: 'single-quoted-scalar', offset, indent, source, end }; - default: - return { type: 'scalar', offset, indent, source, end }; - } -} -/** - * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have. - * - * Best efforts are made to retain any comments previously associated with the `token`, - * though all contents within a collection's `items` will be overwritten. - * - * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, - * as this function does not support any schema operations and won't check for such conflicts. - * - * @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key. - * @param value The string representation of the value, which will have its content properly indented. - * @param context.afterKey In most cases, values after a key should have an additional level of indentation. - * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. - * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value. - * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. - */ -function setScalarValue(token, value, context = {}) { - let { afterKey = false, implicitKey = false, inFlow = false, type } = context; - let indent = 'indent' in token ? token.indent : null; - if (afterKey && typeof indent === 'number') - indent += 2; - if (!type) - switch (token.type) { - case 'single-quoted-scalar': - type = 'QUOTE_SINGLE'; - break; - case 'double-quoted-scalar': - type = 'QUOTE_DOUBLE'; - break; - case 'block-scalar': { - const header = token.props[0]; - if (header.type !== 'block-scalar-header') - throw new Error('Invalid block scalar header'); - type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL'; - break; - } - default: - type = 'PLAIN'; - } - const source = stringifyString({ type, value }, { - implicitKey: implicitKey || indent === null, - indent: indent !== null && indent > 0 ? 
' '.repeat(indent) : '', - inFlow, - options: { blockQuote: true, lineWidth: -1 } - }); - switch (source[0]) { - case '|': - case '>': - setBlockScalarValue(token, source); - break; - case '"': - setFlowScalarValue(token, source, 'double-quoted-scalar'); - break; - case "'": - setFlowScalarValue(token, source, 'single-quoted-scalar'); - break; - default: - setFlowScalarValue(token, source, 'scalar'); - } -} -function setBlockScalarValue(token, source) { - const he = source.indexOf('\n'); - const head = source.substring(0, he); - const body = source.substring(he + 1) + '\n'; - if (token.type === 'block-scalar') { - const header = token.props[0]; - if (header.type !== 'block-scalar-header') - throw new Error('Invalid block scalar header'); - header.source = head; - token.source = body; - } - else { - const { offset } = token; - const indent = 'indent' in token ? token.indent : -1; - const props = [ - { type: 'block-scalar-header', offset, indent, source: head } - ]; - if (!addEndtoBlockProps(props, 'end' in token ? token.end : undefined)) - props.push({ type: 'newline', offset: -1, indent, source: '\n' }); - for (const key of Object.keys(token)) - if (key !== 'type' && key !== 'offset') - delete token[key]; - Object.assign(token, { type: 'block-scalar', indent, props, source: body }); - } -} -/** @returns `true` if last token is a newline */ -function addEndtoBlockProps(props, end) { - if (end) - for (const st of end) - switch (st.type) { - case 'space': - case 'comment': - props.push(st); - break; - case 'newline': - props.push(st); - return true; - } - return false; -} -function setFlowScalarValue(token, source, type) { - switch (token.type) { - case 'scalar': - case 'double-quoted-scalar': - case 'single-quoted-scalar': - token.type = type; - token.source = source; - break; - case 'block-scalar': { - const end = token.props.slice(1); - let oa = source.length; - if (token.props[0].type === 'block-scalar-header') - oa -= token.props[0].source.length; - for (const tok of end) - tok.offset += oa; - delete token.props; - Object.assign(token, { type, source, end }); - break; - } - case 'block-map': - case 'block-seq': { - const offset = token.offset + source.length; - const nl = { type: 'newline', offset, indent: token.indent, source: '\n' }; - delete token.items; - Object.assign(token, { type, source, end: [nl] }); - break; - } - default: { - const indent = 'indent' in token ? token.indent : -1; - const end = 'end' in token && Array.isArray(token.end) - ? token.end.filter(st => st.type === 'space' || - st.type === 'comment' || - st.type === 'newline') - : []; - for (const key of Object.keys(token)) - if (key !== 'type' && key !== 'offset') - delete token[key]; - Object.assign(token, { type, indent, source, end }); - } - } -} - -export { createScalarToken, resolveAsScalar, setScalarValue }; diff --git a/bin/node_modules/yaml/browser/dist/parse/cst-stringify.js b/bin/node_modules/yaml/browser/dist/parse/cst-stringify.js deleted file mode 100644 index d6ab58c..0000000 --- a/bin/node_modules/yaml/browser/dist/parse/cst-stringify.js +++ /dev/null @@ -1,61 +0,0 @@ -/** - * Stringify a CST document, token, or collection item - * - * Fair warning: This applies no validation whatsoever, and - * simply concatenates the sources in their logical order. - */ -const stringify = (cst) => 'type' in cst ? 
stringifyToken(cst) : stringifyItem(cst); -function stringifyToken(token) { - switch (token.type) { - case 'block-scalar': { - let res = ''; - for (const tok of token.props) - res += stringifyToken(tok); - return res + token.source; - } - case 'block-map': - case 'block-seq': { - let res = ''; - for (const item of token.items) - res += stringifyItem(item); - return res; - } - case 'flow-collection': { - let res = token.start.source; - for (const item of token.items) - res += stringifyItem(item); - for (const st of token.end) - res += st.source; - return res; - } - case 'document': { - let res = stringifyItem(token); - if (token.end) - for (const st of token.end) - res += st.source; - return res; - } - default: { - let res = token.source; - if ('end' in token && token.end) - for (const st of token.end) - res += st.source; - return res; - } - } -} -function stringifyItem({ start, key, sep, value }) { - let res = ''; - for (const st of start) - res += st.source; - if (key) - res += stringifyToken(key); - if (sep) - for (const st of sep) - res += st.source; - if (value) - res += stringifyToken(value); - return res; -} - -export { stringify }; diff --git a/bin/node_modules/yaml/browser/dist/parse/cst-visit.js b/bin/node_modules/yaml/browser/dist/parse/cst-visit.js deleted file mode 100644 index deca086..0000000 --- a/bin/node_modules/yaml/browser/dist/parse/cst-visit.js +++ /dev/null @@ -1,97 +0,0 @@ -const BREAK = Symbol('break visit'); -const SKIP = Symbol('skip children'); -const REMOVE = Symbol('remove item'); -/** - * Apply a visitor to a CST document or item. - * - * Walks through the tree (depth-first) starting from the root, calling a - * `visitor` function with two arguments when entering each item: - * - `item`: The current item, which included the following members: - * - `start: SourceToken[]` – Source tokens before the key or value, - * possibly including its anchor or tag. - * - `key?: Token | null` – Set for pair values. May then be `null`, if - * the key before the `:` separator is empty. - * - `sep?: SourceToken[]` – Source tokens between the key and the value, - * which should include the `:` map value indicator if `value` is set. - * - `value?: Token` – The value of a sequence item, or of a map pair. - * - `path`: The steps from the root to the current node, as an array of - * `['key' | 'value', number]` tuples. - * - * The return value of the visitor may be used to control the traversal: - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this token, continue with - * next sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current item, then continue with the next one - * - `number`: Set the index of the next step. This is useful especially if - * the index of the current token has changed. - * - `function`: Define the next visitor for this item. After the original - * visitor is called on item entry, next visitors are called after handling - * a non-empty `key` and when exiting the item. - */ -function visit(cst, visitor) { - if ('type' in cst && cst.type === 'document') - cst = { start: cst.start, value: cst.value }; - _visit(Object.freeze([]), cst, visitor); -} -// Without the `as symbol` casts, TS declares these in the `visit` -// namespace using `var`, but then complains about that because -// `unique symbol` must be `const`. 
-/** Terminate visit traversal completely */ -visit.BREAK = BREAK; -/** Do not visit the children of the current item */ -visit.SKIP = SKIP; -/** Remove the current item */ -visit.REMOVE = REMOVE; -/** Find the item at `path` from `cst` as the root */ -visit.itemAtPath = (cst, path) => { - let item = cst; - for (const [field, index] of path) { - const tok = item?.[field]; - if (tok && 'items' in tok) { - item = tok.items[index]; - } - else - return undefined; - } - return item; -}; -/** - * Get the immediate parent collection of the item at `path` from `cst` as the root. - * - * Throws an error if the collection is not found, which should never happen if the item itself exists. - */ -visit.parentCollection = (cst, path) => { - const parent = visit.itemAtPath(cst, path.slice(0, -1)); - const field = path[path.length - 1][0]; - const coll = parent?.[field]; - if (coll && 'items' in coll) - return coll; - throw new Error('Parent collection not found'); -}; -function _visit(path, item, visitor) { - let ctrl = visitor(item, path); - if (typeof ctrl === 'symbol') - return ctrl; - for (const field of ['key', 'value']) { - const token = item[field]; - if (token && 'items' in token) { - for (let i = 0; i < token.items.length; ++i) { - const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor); - if (typeof ci === 'number') - i = ci - 1; - else if (ci === BREAK) - return BREAK; - else if (ci === REMOVE) { - token.items.splice(i, 1); - i -= 1; - } - } - if (typeof ctrl === 'function' && field === 'key') - ctrl = ctrl(item, path); - } - } - return typeof ctrl === 'function' ? ctrl(item, path) : ctrl; -} - -export { visit }; diff --git a/bin/node_modules/yaml/browser/dist/parse/cst.js b/bin/node_modules/yaml/browser/dist/parse/cst.js deleted file mode 100644 index 8bb2f4a..0000000 --- a/bin/node_modules/yaml/browser/dist/parse/cst.js +++ /dev/null @@ -1,98 +0,0 @@ -export { createScalarToken, resolveAsScalar, setScalarValue } from './cst-scalar.js'; -export { stringify } from './cst-stringify.js'; -export { visit } from './cst-visit.js'; - -/** The byte order mark */ -const BOM = '\u{FEFF}'; -/** Start of doc-mode */ -const DOCUMENT = '\x02'; // C0: Start of Text -/** Unexpected end of flow-mode */ -const FLOW_END = '\x18'; // C0: Cancel -/** Next token is a scalar value */ -const SCALAR = '\x1f'; // C0: Unit Separator -/** @returns `true` if `token` is a flow or block collection */ -const isCollection = (token) => !!token && 'items' in token; -/** @returns `true` if `token` is a flow or block scalar; not an alias */ -const isScalar = (token) => !!token && - (token.type === 'scalar' || - token.type === 'single-quoted-scalar' || - token.type === 'double-quoted-scalar' || - token.type === 'block-scalar'); -/* istanbul ignore next */ -/** Get a printable representation of a lexer token */ -function prettyToken(token) { - switch (token) { - case BOM: - return ''; - case DOCUMENT: - return ''; - case FLOW_END: - return ''; - case SCALAR: - return ''; - default: - return JSON.stringify(token); - } -} -/** Identify the type of a lexer token. May return `null` for unknown tokens. 
*/ -function tokenType(source) { - switch (source) { - case BOM: - return 'byte-order-mark'; - case DOCUMENT: - return 'doc-mode'; - case FLOW_END: - return 'flow-error-end'; - case SCALAR: - return 'scalar'; - case '---': - return 'doc-start'; - case '...': - return 'doc-end'; - case '': - case '\n': - case '\r\n': - return 'newline'; - case '-': - return 'seq-item-ind'; - case '?': - return 'explicit-key-ind'; - case ':': - return 'map-value-ind'; - case '{': - return 'flow-map-start'; - case '}': - return 'flow-map-end'; - case '[': - return 'flow-seq-start'; - case ']': - return 'flow-seq-end'; - case ',': - return 'comma'; - } - switch (source[0]) { - case ' ': - case '\t': - return 'space'; - case '#': - return 'comment'; - case '%': - return 'directive-line'; - case '*': - return 'alias'; - case '&': - return 'anchor'; - case '!': - return 'tag'; - case "'": - return 'single-quoted-scalar'; - case '"': - return 'double-quoted-scalar'; - case '|': - case '>': - return 'block-scalar-header'; - } - return null; -} - -export { BOM, DOCUMENT, FLOW_END, SCALAR, isCollection, isScalar, prettyToken, tokenType }; diff --git a/bin/node_modules/yaml/browser/dist/parse/lexer.js b/bin/node_modules/yaml/browser/dist/parse/lexer.js deleted file mode 100644 index 509bfd3..0000000 --- a/bin/node_modules/yaml/browser/dist/parse/lexer.js +++ /dev/null @@ -1,708 +0,0 @@ -import { BOM, DOCUMENT, FLOW_END, SCALAR } from './cst.js'; - -/* -START -> stream - -stream - directive -> line-end -> stream - indent + line-end -> stream - [else] -> line-start - -line-end - comment -> line-end - newline -> . - input-end -> END - -line-start - doc-start -> doc - doc-end -> stream - [else] -> indent -> block-start - -block-start - seq-item-start -> block-start - explicit-key-start -> block-start - map-value-start -> block-start - [else] -> doc - -doc - line-end -> line-start - spaces -> doc - anchor -> doc - tag -> doc - flow-start -> flow -> doc - flow-end -> error -> doc - seq-item-start -> error -> doc - explicit-key-start -> error -> doc - map-value-start -> doc - alias -> doc - quote-start -> quoted-scalar -> doc - block-scalar-header -> line-end -> block-scalar(min) -> line-start - [else] -> plain-scalar(false, min) -> doc - -flow - line-end -> flow - spaces -> flow - anchor -> flow - tag -> flow - flow-start -> flow -> flow - flow-end -> . - seq-item-start -> error -> flow - explicit-key-start -> flow - map-value-start -> flow - alias -> flow - quote-start -> quoted-scalar -> flow - comma -> flow - [else] -> plain-scalar(true, 0) -> flow - -quoted-scalar - quote-end -> . - [else] -> quoted-scalar - -block-scalar(min) - newline + peek(indent < min) -> . - [else] -> block-scalar(min) - -plain-scalar(is-flow, min) - scalar-end(is-flow) -> . - peek(newline + (indent < min)) -> . - [else] -> plain-scalar(min) -*/ -function isEmpty(ch) { - switch (ch) { - case undefined: - case ' ': - case '\n': - case '\r': - case '\t': - return true; - default: - return false; - } -} -const hexDigits = '0123456789ABCDEFabcdef'.split(''); -const tagChars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()".split(''); -const invalidFlowScalarChars = ',[]{}'.split(''); -const invalidAnchorChars = ' ,[]{}\n\r\t'.split(''); -const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch); -/** - * Splits an input string into lexical tokens, i.e. smaller strings that are - * easily identifiable by `tokens.tokenType()`. - * - * Lexing starts always in a "stream" context. 
Incomplete input may be buffered - * until a complete token can be emitted. - * - * In addition to slices of the original input, the following control characters - * may also be emitted: - * - * - `\x02` (Start of Text): A document starts with the next token - * - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error) - * - `\x1f` (Unit Separator): Next token is a scalar value - * - `\u{FEFF}` (Byte order mark): Emitted separately outside documents - */ -class Lexer { - constructor() { - /** - * Flag indicating whether the end of the current buffer marks the end of - * all input - */ - this.atEnd = false; - /** - * Explicit indent set in block scalar header, as an offset from the current - * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not - * explicitly set. - */ - this.blockScalarIndent = -1; - /** - * Block scalars that include a + (keep) chomping indicator in their header - * include trailing empty lines, which are otherwise excluded from the - * scalar's contents. - */ - this.blockScalarKeep = false; - /** Current input */ - this.buffer = ''; - /** - * Flag noting whether the map value indicator : can immediately follow this - * node within a flow context. - */ - this.flowKey = false; - /** Count of surrounding flow collection levels. */ - this.flowLevel = 0; - /** - * Minimum level of indentation required for next lines to be parsed as a - * part of the current scalar value. - */ - this.indentNext = 0; - /** Indentation level of the current line. */ - this.indentValue = 0; - /** Position of the next \n character. */ - this.lineEndPos = null; - /** Stores the state of the lexer if reaching the end of incpomplete input */ - this.next = null; - /** A pointer to `buffer`; the current position of the lexer. */ - this.pos = 0; - } - /** - * Generate YAML tokens from the `source` string. If `incomplete`, - * a part of the last line may be left as a buffer for the next call. - * - * @returns A generator of lexical tokens - */ - *lex(source, incomplete = false) { - if (source) { - if (typeof source !== 'string') - throw TypeError('source is not a string'); - this.buffer = this.buffer ? this.buffer + source : source; - this.lineEndPos = null; - } - this.atEnd = !incomplete; - let next = this.next ?? 'stream'; - while (next && (incomplete || this.hasChars(1))) - next = yield* this.parseNext(next); - } - atLineEnd() { - let i = this.pos; - let ch = this.buffer[i]; - while (ch === ' ' || ch === '\t') - ch = this.buffer[++i]; - if (!ch || ch === '#' || ch === '\n') - return true; - if (ch === '\r') - return this.buffer[i + 1] === '\n'; - return false; - } - charAt(n) { - return this.buffer[this.pos + n]; - } - continueScalar(offset) { - let ch = this.buffer[offset]; - if (this.indentNext > 0) { - let indent = 0; - while (ch === ' ') - ch = this.buffer[++indent + offset]; - if (ch === '\r') { - const next = this.buffer[indent + offset + 1]; - if (next === '\n' || (!next && !this.atEnd)) - return offset + indent + 1; - } - return ch === '\n' || indent >= this.indentNext || (!ch && !this.atEnd) - ? offset + indent - : -1; - } - if (ch === '-' || ch === '.') { - const dt = this.buffer.substr(offset, 3); - if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3])) - return -1; - } - return offset; - } - getLine() { - let end = this.lineEndPos; - if (typeof end !== 'number' || (end !== -1 && end < this.pos)) { - end = this.buffer.indexOf('\n', this.pos); - this.lineEndPos = end; - } - if (end === -1) - return this.atEnd ? 
this.buffer.substring(this.pos) : null; - if (this.buffer[end - 1] === '\r') - end -= 1; - return this.buffer.substring(this.pos, end); - } - hasChars(n) { - return this.pos + n <= this.buffer.length; - } - setNext(state) { - this.buffer = this.buffer.substring(this.pos); - this.pos = 0; - this.lineEndPos = null; - this.next = state; - return null; - } - peek(n) { - return this.buffer.substr(this.pos, n); - } - *parseNext(next) { - switch (next) { - case 'stream': - return yield* this.parseStream(); - case 'line-start': - return yield* this.parseLineStart(); - case 'block-start': - return yield* this.parseBlockStart(); - case 'doc': - return yield* this.parseDocument(); - case 'flow': - return yield* this.parseFlowCollection(); - case 'quoted-scalar': - return yield* this.parseQuotedScalar(); - case 'block-scalar': - return yield* this.parseBlockScalar(); - case 'plain-scalar': - return yield* this.parsePlainScalar(); - } - } - *parseStream() { - let line = this.getLine(); - if (line === null) - return this.setNext('stream'); - if (line[0] === BOM) { - yield* this.pushCount(1); - line = line.substring(1); - } - if (line[0] === '%') { - let dirEnd = line.length; - let cs = line.indexOf('#'); - while (cs !== -1) { - const ch = line[cs - 1]; - if (ch === ' ' || ch === '\t') { - dirEnd = cs - 1; - break; - } - else { - cs = line.indexOf('#', cs + 1); - } - } - while (true) { - const ch = line[dirEnd - 1]; - if (ch === ' ' || ch === '\t') - dirEnd -= 1; - else - break; - } - const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true)); - yield* this.pushCount(line.length - n); // possible comment - this.pushNewline(); - return 'stream'; - } - if (this.atLineEnd()) { - const sp = yield* this.pushSpaces(true); - yield* this.pushCount(line.length - sp); - yield* this.pushNewline(); - return 'stream'; - } - yield DOCUMENT; - return yield* this.parseLineStart(); - } - *parseLineStart() { - const ch = this.charAt(0); - if (!ch && !this.atEnd) - return this.setNext('line-start'); - if (ch === '-' || ch === '.') { - if (!this.atEnd && !this.hasChars(4)) - return this.setNext('line-start'); - const s = this.peek(3); - if (s === '---' && isEmpty(this.charAt(3))) { - yield* this.pushCount(3); - this.indentValue = 0; - this.indentNext = 0; - return 'doc'; - } - else if (s === '...' && isEmpty(this.charAt(3))) { - yield* this.pushCount(3); - return 'stream'; - } - } - this.indentValue = yield* this.pushSpaces(false); - if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1))) - this.indentNext = this.indentValue; - return yield* this.parseBlockStart(); - } - *parseBlockStart() { - const [ch0, ch1] = this.peek(2); - if (!ch1 && !this.atEnd) - return this.setNext('block-start'); - if ((ch0 === '-' || ch0 === '?' 
|| ch0 === ':') && isEmpty(ch1)) { - const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true)); - this.indentNext = this.indentValue + 1; - this.indentValue += n; - return yield* this.parseBlockStart(); - } - return 'doc'; - } - *parseDocument() { - yield* this.pushSpaces(true); - const line = this.getLine(); - if (line === null) - return this.setNext('doc'); - let n = yield* this.pushIndicators(); - switch (line[n]) { - case '#': - yield* this.pushCount(line.length - n); - // fallthrough - case undefined: - yield* this.pushNewline(); - return yield* this.parseLineStart(); - case '{': - case '[': - yield* this.pushCount(1); - this.flowKey = false; - this.flowLevel = 1; - return 'flow'; - case '}': - case ']': - // this is an error - yield* this.pushCount(1); - return 'doc'; - case '*': - yield* this.pushUntil(isNotAnchorChar); - return 'doc'; - case '"': - case "'": - return yield* this.parseQuotedScalar(); - case '|': - case '>': - n += yield* this.parseBlockScalarHeader(); - n += yield* this.pushSpaces(true); - yield* this.pushCount(line.length - n); - yield* this.pushNewline(); - return yield* this.parseBlockScalar(); - default: - return yield* this.parsePlainScalar(); - } - } - *parseFlowCollection() { - let nl, sp; - let indent = -1; - do { - nl = yield* this.pushNewline(); - if (nl > 0) { - sp = yield* this.pushSpaces(false); - this.indentValue = indent = sp; - } - else { - sp = 0; - } - sp += yield* this.pushSpaces(true); - } while (nl + sp > 0); - const line = this.getLine(); - if (line === null) - return this.setNext('flow'); - if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') || - (indent === 0 && - (line.startsWith('---') || line.startsWith('...')) && - isEmpty(line[3]))) { - // Allowing for the terminal ] or } at the same (rather than greater) - // indent level as the initial [ or { is technically invalid, but - // failing here would be surprising to users. - const atFlowEndMarker = indent === this.indentNext - 1 && - this.flowLevel === 1 && - (line[0] === ']' || line[0] === '}'); - if (!atFlowEndMarker) { - // this is an error - this.flowLevel = 0; - yield FLOW_END; - return yield* this.parseLineStart(); - } - } - let n = 0; - while (line[n] === ',') { - n += yield* this.pushCount(1); - n += yield* this.pushSpaces(true); - this.flowKey = false; - } - n += yield* this.pushIndicators(); - switch (line[n]) { - case undefined: - return 'flow'; - case '#': - yield* this.pushCount(line.length - n); - return 'flow'; - case '{': - case '[': - yield* this.pushCount(1); - this.flowKey = false; - this.flowLevel += 1; - return 'flow'; - case '}': - case ']': - yield* this.pushCount(1); - this.flowKey = true; - this.flowLevel -= 1; - return this.flowLevel ? 
'flow' : 'doc'; - case '*': - yield* this.pushUntil(isNotAnchorChar); - return 'flow'; - case '"': - case "'": - this.flowKey = true; - return yield* this.parseQuotedScalar(); - case ':': { - const next = this.charAt(1); - if (this.flowKey || isEmpty(next) || next === ',') { - this.flowKey = false; - yield* this.pushCount(1); - yield* this.pushSpaces(true); - return 'flow'; - } - } - // fallthrough - default: - this.flowKey = false; - return yield* this.parsePlainScalar(); - } - } - *parseQuotedScalar() { - const quote = this.charAt(0); - let end = this.buffer.indexOf(quote, this.pos + 1); - if (quote === "'") { - while (end !== -1 && this.buffer[end + 1] === "'") - end = this.buffer.indexOf("'", end + 2); - } - else { - // double-quote - while (end !== -1) { - let n = 0; - while (this.buffer[end - 1 - n] === '\\') - n += 1; - if (n % 2 === 0) - break; - end = this.buffer.indexOf('"', end + 1); - } - } - // Only looking for newlines within the quotes - const qb = this.buffer.substring(0, end); - let nl = qb.indexOf('\n', this.pos); - if (nl !== -1) { - while (nl !== -1) { - const cs = this.continueScalar(nl + 1); - if (cs === -1) - break; - nl = qb.indexOf('\n', cs); - } - if (nl !== -1) { - // this is an error caused by an unexpected unindent - end = nl - (qb[nl - 1] === '\r' ? 2 : 1); - } - } - if (end === -1) { - if (!this.atEnd) - return this.setNext('quoted-scalar'); - end = this.buffer.length; - } - yield* this.pushToIndex(end + 1, false); - return this.flowLevel ? 'flow' : 'doc'; - } - *parseBlockScalarHeader() { - this.blockScalarIndent = -1; - this.blockScalarKeep = false; - let i = this.pos; - while (true) { - const ch = this.buffer[++i]; - if (ch === '+') - this.blockScalarKeep = true; - else if (ch > '0' && ch <= '9') - this.blockScalarIndent = Number(ch) - 1; - else if (ch !== '-') - break; - } - return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#'); - } - *parseBlockScalar() { - let nl = this.pos - 1; // may be -1 if this.pos === 0 - let indent = 0; - let ch; - loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) { - switch (ch) { - case ' ': - indent += 1; - break; - case '\n': - nl = i; - indent = 0; - break; - case '\r': { - const next = this.buffer[i + 1]; - if (!next && !this.atEnd) - return this.setNext('block-scalar'); - if (next === '\n') - break; - } // fallthrough - default: - break loop; - } - } - if (!ch && !this.atEnd) - return this.setNext('block-scalar'); - if (indent >= this.indentNext) { - if (this.blockScalarIndent === -1) - this.indentNext = indent; - else - this.indentNext += this.blockScalarIndent; - do { - const cs = this.continueScalar(nl + 1); - if (cs === -1) - break; - nl = this.buffer.indexOf('\n', cs); - } while (nl !== -1); - if (nl === -1) { - if (!this.atEnd) - return this.setNext('block-scalar'); - nl = this.buffer.length; - } - } - if (!this.blockScalarKeep) { - do { - let i = nl - 1; - let ch = this.buffer[i]; - if (ch === '\r') - ch = this.buffer[--i]; - const lastChar = i; // Drop the line if last char not more indented - while (ch === ' ' || ch === '\t') - ch = this.buffer[--i]; - if (ch === '\n' && i >= this.pos && i + 1 + indent > lastChar) - nl = i; - else - break; - } while (true); - } - yield SCALAR; - yield* this.pushToIndex(nl + 1, true); - return yield* this.parseLineStart(); - } - *parsePlainScalar() { - const inFlow = this.flowLevel > 0; - let end = this.pos - 1; - let i = this.pos - 1; - let ch; - while ((ch = this.buffer[++i])) { - if (ch === ':') { - const next = this.buffer[i + 1]; - if (isEmpty(next) || (inFlow 
&& next === ',')) - break; - end = i; - } - else if (isEmpty(ch)) { - let next = this.buffer[i + 1]; - if (ch === '\r') { - if (next === '\n') { - i += 1; - ch = '\n'; - next = this.buffer[i + 1]; - } - else - end = i; - } - if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next))) - break; - if (ch === '\n') { - const cs = this.continueScalar(i + 1); - if (cs === -1) - break; - i = Math.max(i, cs - 2); // to advance, but still account for ' #' - } - } - else { - if (inFlow && invalidFlowScalarChars.includes(ch)) - break; - end = i; - } - } - if (!ch && !this.atEnd) - return this.setNext('plain-scalar'); - yield SCALAR; - yield* this.pushToIndex(end + 1, true); - return inFlow ? 'flow' : 'doc'; - } - *pushCount(n) { - if (n > 0) { - yield this.buffer.substr(this.pos, n); - this.pos += n; - return n; - } - return 0; - } - *pushToIndex(i, allowEmpty) { - const s = this.buffer.slice(this.pos, i); - if (s) { - yield s; - this.pos += s.length; - return s.length; - } - else if (allowEmpty) - yield ''; - return 0; - } - *pushIndicators() { - switch (this.charAt(0)) { - case '!': - return ((yield* this.pushTag()) + - (yield* this.pushSpaces(true)) + - (yield* this.pushIndicators())); - case '&': - return ((yield* this.pushUntil(isNotAnchorChar)) + - (yield* this.pushSpaces(true)) + - (yield* this.pushIndicators())); - case '-': // this is an error - case '?': // this is an error outside flow collections - case ':': { - const inFlow = this.flowLevel > 0; - const ch1 = this.charAt(1); - if (isEmpty(ch1) || (inFlow && invalidFlowScalarChars.includes(ch1))) { - if (!inFlow) - this.indentNext = this.indentValue + 1; - else if (this.flowKey) - this.flowKey = false; - return ((yield* this.pushCount(1)) + - (yield* this.pushSpaces(true)) + - (yield* this.pushIndicators())); - } - } - } - return 0; - } - *pushTag() { - if (this.charAt(1) === '<') { - let i = this.pos + 2; - let ch = this.buffer[i]; - while (!isEmpty(ch) && ch !== '>') - ch = this.buffer[++i]; - return yield* this.pushToIndex(ch === '>' ? i + 1 : i, false); - } - else { - let i = this.pos + 1; - let ch = this.buffer[i]; - while (ch) { - if (tagChars.includes(ch)) - ch = this.buffer[++i]; - else if (ch === '%' && - hexDigits.includes(this.buffer[i + 1]) && - hexDigits.includes(this.buffer[i + 2])) { - ch = this.buffer[(i += 3)]; - } - else - break; - } - return yield* this.pushToIndex(i, false); - } - } - *pushNewline() { - const ch = this.buffer[this.pos]; - if (ch === '\n') - return yield* this.pushCount(1); - else if (ch === '\r' && this.charAt(1) === '\n') - return yield* this.pushCount(2); - else - return 0; - } - *pushSpaces(allowTabs) { - let i = this.pos - 1; - let ch; - do { - ch = this.buffer[++i]; - } while (ch === ' ' || (allowTabs && ch === '\t')); - const n = i - this.pos; - if (n > 0) { - yield this.buffer.substr(this.pos, n); - this.pos = i; - } - return n; - } - *pushUntil(test) { - let i = this.pos; - let ch = this.buffer[i]; - while (!test(ch)) - ch = this.buffer[++i]; - return yield* this.pushToIndex(i, false); - } -} - -export { Lexer }; diff --git a/bin/node_modules/yaml/browser/dist/parse/line-counter.js b/bin/node_modules/yaml/browser/dist/parse/line-counter.js deleted file mode 100644 index 002ce24..0000000 --- a/bin/node_modules/yaml/browser/dist/parse/line-counter.js +++ /dev/null @@ -1,39 +0,0 @@ -/** - * Tracks newlines during parsing in order to provide an efficient API for - * determining the one-indexed `{ line, col }` position for any offset - * within the input. 
- */ -class LineCounter { - constructor() { - this.lineStarts = []; - /** - * Should be called in ascending order. Otherwise, call - * `lineCounter.lineStarts.sort()` before calling `linePos()`. - */ - this.addNewLine = (offset) => this.lineStarts.push(offset); - /** - * Performs a binary search and returns the 1-indexed { line, col } - * position of `offset`. If `line === 0`, `addNewLine` has never been - * called or `offset` is before the first known newline. - */ - this.linePos = (offset) => { - let low = 0; - let high = this.lineStarts.length; - while (low < high) { - const mid = (low + high) >> 1; // Math.floor((low + high) / 2) - if (this.lineStarts[mid] < offset) - low = mid + 1; - else - high = mid; - } - if (this.lineStarts[low] === offset) - return { line: low + 1, col: 1 }; - if (low === 0) - return { line: 0, col: offset }; - const start = this.lineStarts[low - 1]; - return { line: low, col: offset - start + 1 }; - }; - } -} - -export { LineCounter }; diff --git a/bin/node_modules/yaml/browser/dist/parse/parser.js b/bin/node_modules/yaml/browser/dist/parse/parser.js deleted file mode 100644 index 65da03d..0000000 --- a/bin/node_modules/yaml/browser/dist/parse/parser.js +++ /dev/null @@ -1,953 +0,0 @@ -import { tokenType } from './cst.js'; -import { Lexer } from './lexer.js'; - -function includesToken(list, type) { - for (let i = 0; i < list.length; ++i) - if (list[i].type === type) - return true; - return false; -} -function findNonEmptyIndex(list) { - for (let i = 0; i < list.length; ++i) { - switch (list[i].type) { - case 'space': - case 'comment': - case 'newline': - break; - default: - return i; - } - } - return -1; -} -function isFlowToken(token) { - switch (token?.type) { - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - case 'flow-collection': - return true; - default: - return false; - } -} -function getPrevProps(parent) { - switch (parent.type) { - case 'document': - return parent.start; - case 'block-map': { - const it = parent.items[parent.items.length - 1]; - return it.sep ?? it.start; - } - case 'block-seq': - return parent.items[parent.items.length - 1].start; - /* istanbul ignore next should not happen */ - default: - return []; - } -} -/** Note: May modify input array */ -function getFirstKeyStartProps(prev) { - if (prev.length === 0) - return []; - let i = prev.length; - loop: while (--i >= 0) { - switch (prev[i].type) { - case 'doc-start': - case 'explicit-key-ind': - case 'map-value-ind': - case 'seq-item-ind': - case 'newline': - break loop; - } - } - while (prev[++i]?.type === 'space') { - /* loop */ - } - return prev.splice(i, prev.length); -} -function fixFlowSeqItems(fc) { - if (fc.start.type === 'flow-seq-start') { - for (const it of fc.items) { - if (it.sep && - !it.value && - !includesToken(it.start, 'explicit-key-ind') && - !includesToken(it.sep, 'map-value-ind')) { - if (it.key) - it.value = it.key; - delete it.key; - if (isFlowToken(it.value)) { - if (it.value.end) - Array.prototype.push.apply(it.value.end, it.sep); - else - it.value.end = it.sep; - } - else - Array.prototype.push.apply(it.start, it.sep); - delete it.sep; - } - } - } -} -/** - * A YAML concrete syntax tree (CST) parser - * - * ```ts - * const src: string = ... 
- * for (const token of new Parser().parse(src)) { - * // token: Token - * } - * ``` - * - * To use the parser with a user-provided lexer: - * - * ```ts - * function* parse(source: string, lexer: Lexer) { - * const parser = new Parser() - * for (const lexeme of lexer.lex(source)) - * yield* parser.next(lexeme) - * yield* parser.end() - * } - * - * const src: string = ... - * const lexer = new Lexer() - * for (const token of parse(src, lexer)) { - * // token: Token - * } - * ``` - */ -class Parser { - /** - * @param onNewLine - If defined, called separately with the start position of - * each new line (in `parse()`, including the start of input). - */ - constructor(onNewLine) { - /** If true, space and sequence indicators count as indentation */ - this.atNewLine = true; - /** If true, next token is a scalar value */ - this.atScalar = false; - /** Current indentation level */ - this.indent = 0; - /** Current offset since the start of parsing */ - this.offset = 0; - /** On the same line with a block map key */ - this.onKeyLine = false; - /** Top indicates the node that's currently being built */ - this.stack = []; - /** The source of the current token, set in parse() */ - this.source = ''; - /** The type of the current token, set in parse() */ - this.type = ''; - // Must be defined after `next()` - this.lexer = new Lexer(); - this.onNewLine = onNewLine; - } - /** - * Parse `source` as a YAML stream. - * If `incomplete`, a part of the last line may be left as a buffer for the next call. - * - * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens. - * - * @returns A generator of tokens representing each directive, document, and other structure. - */ - *parse(source, incomplete = false) { - if (this.onNewLine && this.offset === 0) - this.onNewLine(0); - for (const lexeme of this.lexer.lex(source, incomplete)) - yield* this.next(lexeme); - if (!incomplete) - yield* this.end(); - } - /** - * Advance the parser by the `source` of one lexical token. 
- */ - *next(source) { - this.source = source; - if (this.atScalar) { - this.atScalar = false; - yield* this.step(); - this.offset += source.length; - return; - } - const type = tokenType(source); - if (!type) { - const message = `Not a YAML token: ${source}`; - yield* this.pop({ type: 'error', offset: this.offset, message, source }); - this.offset += source.length; - } - else if (type === 'scalar') { - this.atNewLine = false; - this.atScalar = true; - this.type = 'scalar'; - } - else { - this.type = type; - yield* this.step(); - switch (type) { - case 'newline': - this.atNewLine = true; - this.indent = 0; - if (this.onNewLine) - this.onNewLine(this.offset + source.length); - break; - case 'space': - if (this.atNewLine && source[0] === ' ') - this.indent += source.length; - break; - case 'explicit-key-ind': - case 'map-value-ind': - case 'seq-item-ind': - if (this.atNewLine) - this.indent += source.length; - break; - case 'doc-mode': - case 'flow-error-end': - return; - default: - this.atNewLine = false; - } - this.offset += source.length; - } - } - /** Call at end of input to push out any remaining constructions */ - *end() { - while (this.stack.length > 0) - yield* this.pop(); - } - get sourceToken() { - const st = { - type: this.type, - offset: this.offset, - indent: this.indent, - source: this.source - }; - return st; - } - *step() { - const top = this.peek(1); - if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) { - while (this.stack.length > 0) - yield* this.pop(); - this.stack.push({ - type: 'doc-end', - offset: this.offset, - source: this.source - }); - return; - } - if (!top) - return yield* this.stream(); - switch (top.type) { - case 'document': - return yield* this.document(top); - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - return yield* this.scalar(top); - case 'block-scalar': - return yield* this.blockScalar(top); - case 'block-map': - return yield* this.blockMap(top); - case 'block-seq': - return yield* this.blockSequence(top); - case 'flow-collection': - return yield* this.flowCollection(top); - case 'doc-end': - return yield* this.documentEnd(top); - } - /* istanbul ignore next should not happen */ - yield* this.pop(); - } - peek(n) { - return this.stack[this.stack.length - n]; - } - *pop(error) { - const token = error ?? this.stack.pop(); - /* istanbul ignore if should not happen */ - if (!token) { - const message = 'Tried to pop an empty stack'; - yield { type: 'error', offset: this.offset, source: '', message }; - } - else if (this.stack.length === 0) { - yield token; - } - else { - const top = this.peek(1); - if (token.type === 'block-scalar') { - // Block scalars use their parent rather than header indent - token.indent = 'indent' in top ? 
top.indent : 0; - } - else if (token.type === 'flow-collection' && top.type === 'document') { - // Ignore all indent for top-level flow collections - token.indent = 0; - } - if (token.type === 'flow-collection') - fixFlowSeqItems(token); - switch (top.type) { - case 'document': - top.value = token; - break; - case 'block-scalar': - top.props.push(token); // error - break; - case 'block-map': { - const it = top.items[top.items.length - 1]; - if (it.value) { - top.items.push({ start: [], key: token, sep: [] }); - this.onKeyLine = true; - return; - } - else if (it.sep) { - it.value = token; - } - else { - Object.assign(it, { key: token, sep: [] }); - this.onKeyLine = !includesToken(it.start, 'explicit-key-ind'); - return; - } - break; - } - case 'block-seq': { - const it = top.items[top.items.length - 1]; - if (it.value) - top.items.push({ start: [], value: token }); - else - it.value = token; - break; - } - case 'flow-collection': { - const it = top.items[top.items.length - 1]; - if (!it || it.value) - top.items.push({ start: [], key: token, sep: [] }); - else if (it.sep) - it.value = token; - else - Object.assign(it, { key: token, sep: [] }); - return; - } - /* istanbul ignore next should not happen */ - default: - yield* this.pop(); - yield* this.pop(token); - } - if ((top.type === 'document' || - top.type === 'block-map' || - top.type === 'block-seq') && - (token.type === 'block-map' || token.type === 'block-seq')) { - const last = token.items[token.items.length - 1]; - if (last && - !last.sep && - !last.value && - last.start.length > 0 && - findNonEmptyIndex(last.start) === -1 && - (token.indent === 0 || - last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) { - if (top.type === 'document') - top.end = last.start; - else - top.items.push({ start: last.start }); - token.items.splice(-1, 1); - } - } - } - } - *stream() { - switch (this.type) { - case 'directive-line': - yield { type: 'directive', offset: this.offset, source: this.source }; - return; - case 'byte-order-mark': - case 'space': - case 'comment': - case 'newline': - yield this.sourceToken; - return; - case 'doc-mode': - case 'doc-start': { - const doc = { - type: 'document', - offset: this.offset, - start: [] - }; - if (this.type === 'doc-start') - doc.start.push(this.sourceToken); - this.stack.push(doc); - return; - } - } - yield { - type: 'error', - offset: this.offset, - message: `Unexpected ${this.type} token in YAML stream`, - source: this.source - }; - } - *document(doc) { - if (doc.value) - return yield* this.lineEnd(doc); - switch (this.type) { - case 'doc-start': { - if (findNonEmptyIndex(doc.start) !== -1) { - yield* this.pop(); - yield* this.step(); - } - else - doc.start.push(this.sourceToken); - return; - } - case 'anchor': - case 'tag': - case 'space': - case 'comment': - case 'newline': - doc.start.push(this.sourceToken); - return; - } - const bv = this.startBlockValue(doc); - if (bv) - this.stack.push(bv); - else { - yield { - type: 'error', - offset: this.offset, - message: `Unexpected ${this.type} token in YAML document`, - source: this.source - }; - } - } - *scalar(scalar) { - if (this.type === 'map-value-ind') { - const prev = getPrevProps(this.peek(2)); - const start = getFirstKeyStartProps(prev); - let sep; - if (scalar.end) { - sep = scalar.end; - sep.push(this.sourceToken); - delete scalar.end; - } - else - sep = [this.sourceToken]; - const map = { - type: 'block-map', - offset: scalar.offset, - indent: scalar.indent, - items: [{ start, key: scalar, sep }] - }; - this.onKeyLine = 
true; - this.stack[this.stack.length - 1] = map; - } - else - yield* this.lineEnd(scalar); - } - *blockScalar(scalar) { - switch (this.type) { - case 'space': - case 'comment': - case 'newline': - scalar.props.push(this.sourceToken); - return; - case 'scalar': - scalar.source = this.source; - // block-scalar source includes trailing newline - this.atNewLine = true; - this.indent = 0; - if (this.onNewLine) { - let nl = this.source.indexOf('\n') + 1; - while (nl !== 0) { - this.onNewLine(this.offset + nl); - nl = this.source.indexOf('\n', nl) + 1; - } - } - yield* this.pop(); - break; - /* istanbul ignore next should not happen */ - default: - yield* this.pop(); - yield* this.step(); - } - } - *blockMap(map) { - const it = map.items[map.items.length - 1]; - // it.sep is true-ish if pair already has key or : separator - switch (this.type) { - case 'newline': - this.onKeyLine = false; - if (it.value) { - const end = 'end' in it.value ? it.value.end : undefined; - const last = Array.isArray(end) ? end[end.length - 1] : undefined; - if (last?.type === 'comment') - end?.push(this.sourceToken); - else - map.items.push({ start: [this.sourceToken] }); - } - else if (it.sep) { - it.sep.push(this.sourceToken); - } - else { - it.start.push(this.sourceToken); - } - return; - case 'space': - case 'comment': - if (it.value) { - map.items.push({ start: [this.sourceToken] }); - } - else if (it.sep) { - it.sep.push(this.sourceToken); - } - else { - if (this.atIndentedComment(it.start, map.indent)) { - const prev = map.items[map.items.length - 2]; - const end = prev?.value?.end; - if (Array.isArray(end)) { - Array.prototype.push.apply(end, it.start); - end.push(this.sourceToken); - map.items.pop(); - return; - } - } - it.start.push(this.sourceToken); - } - return; - } - if (this.indent >= map.indent) { - const atNextItem = !this.onKeyLine && - this.indent === map.indent && - it.sep && - this.type !== 'seq-item-ind'; - // For empty nodes, assign newline-separated not indented empty tokens to following node - let start = []; - if (atNextItem && it.sep && !it.value) { - const nl = []; - for (let i = 0; i < it.sep.length; ++i) { - const st = it.sep[i]; - switch (st.type) { - case 'newline': - nl.push(i); - break; - case 'space': - break; - case 'comment': - if (st.indent > map.indent) - nl.length = 0; - break; - default: - nl.length = 0; - } - } - if (nl.length >= 2) - start = it.sep.splice(nl[1]); - } - switch (this.type) { - case 'anchor': - case 'tag': - if (atNextItem || it.value) { - start.push(this.sourceToken); - map.items.push({ start }); - this.onKeyLine = true; - } - else if (it.sep) { - it.sep.push(this.sourceToken); - } - else { - it.start.push(this.sourceToken); - } - return; - case 'explicit-key-ind': - if (!it.sep && !includesToken(it.start, 'explicit-key-ind')) { - it.start.push(this.sourceToken); - } - else if (atNextItem || it.value) { - start.push(this.sourceToken); - map.items.push({ start }); - } - else { - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start: [this.sourceToken] }] - }); - } - this.onKeyLine = true; - return; - case 'map-value-ind': - if (includesToken(it.start, 'explicit-key-ind')) { - if (!it.sep) { - if (includesToken(it.start, 'newline')) { - Object.assign(it, { key: null, sep: [this.sourceToken] }); - } - else { - const start = getFirstKeyStartProps(it.start); - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key: null, sep: [this.sourceToken] }] - }); - } - } - else 
if (it.value) { - map.items.push({ start: [], key: null, sep: [this.sourceToken] }); - } - else if (includesToken(it.sep, 'map-value-ind')) { - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key: null, sep: [this.sourceToken] }] - }); - } - else if (isFlowToken(it.key) && - !includesToken(it.sep, 'newline')) { - const start = getFirstKeyStartProps(it.start); - const key = it.key; - const sep = it.sep; - sep.push(this.sourceToken); - // @ts-expect-error type guard is wrong here - delete it.key, delete it.sep; - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key, sep }] - }); - } - else if (start.length > 0) { - // Not actually at next item - it.sep = it.sep.concat(start, this.sourceToken); - } - else { - it.sep.push(this.sourceToken); - } - } - else { - if (!it.sep) { - Object.assign(it, { key: null, sep: [this.sourceToken] }); - } - else if (it.value || atNextItem) { - map.items.push({ start, key: null, sep: [this.sourceToken] }); - } - else if (includesToken(it.sep, 'map-value-ind')) { - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start: [], key: null, sep: [this.sourceToken] }] - }); - } - else { - it.sep.push(this.sourceToken); - } - } - this.onKeyLine = true; - return; - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': { - const fs = this.flowScalar(this.type); - if (atNextItem || it.value) { - map.items.push({ start, key: fs, sep: [] }); - this.onKeyLine = true; - } - else if (it.sep) { - this.stack.push(fs); - } - else { - Object.assign(it, { key: fs, sep: [] }); - this.onKeyLine = true; - } - return; - } - default: { - const bv = this.startBlockValue(map); - if (bv) { - if (atNextItem && - bv.type !== 'block-seq' && - includesToken(it.start, 'explicit-key-ind')) { - map.items.push({ start }); - } - this.stack.push(bv); - return; - } - } - } - } - yield* this.pop(); - yield* this.step(); - } - *blockSequence(seq) { - const it = seq.items[seq.items.length - 1]; - switch (this.type) { - case 'newline': - if (it.value) { - const end = 'end' in it.value ? it.value.end : undefined; - const last = Array.isArray(end) ? 
end[end.length - 1] : undefined; - if (last?.type === 'comment') - end?.push(this.sourceToken); - else - seq.items.push({ start: [this.sourceToken] }); - } - else - it.start.push(this.sourceToken); - return; - case 'space': - case 'comment': - if (it.value) - seq.items.push({ start: [this.sourceToken] }); - else { - if (this.atIndentedComment(it.start, seq.indent)) { - const prev = seq.items[seq.items.length - 2]; - const end = prev?.value?.end; - if (Array.isArray(end)) { - Array.prototype.push.apply(end, it.start); - end.push(this.sourceToken); - seq.items.pop(); - return; - } - } - it.start.push(this.sourceToken); - } - return; - case 'anchor': - case 'tag': - if (it.value || this.indent <= seq.indent) - break; - it.start.push(this.sourceToken); - return; - case 'seq-item-ind': - if (this.indent !== seq.indent) - break; - if (it.value || includesToken(it.start, 'seq-item-ind')) - seq.items.push({ start: [this.sourceToken] }); - else - it.start.push(this.sourceToken); - return; - } - if (this.indent > seq.indent) { - const bv = this.startBlockValue(seq); - if (bv) { - this.stack.push(bv); - return; - } - } - yield* this.pop(); - yield* this.step(); - } - *flowCollection(fc) { - const it = fc.items[fc.items.length - 1]; - if (this.type === 'flow-error-end') { - let top; - do { - yield* this.pop(); - top = this.peek(1); - } while (top && top.type === 'flow-collection'); - } - else if (fc.end.length === 0) { - switch (this.type) { - case 'comma': - case 'explicit-key-ind': - if (!it || it.sep) - fc.items.push({ start: [this.sourceToken] }); - else - it.start.push(this.sourceToken); - return; - case 'map-value-ind': - if (!it || it.value) - fc.items.push({ start: [], key: null, sep: [this.sourceToken] }); - else if (it.sep) - it.sep.push(this.sourceToken); - else - Object.assign(it, { key: null, sep: [this.sourceToken] }); - return; - case 'space': - case 'comment': - case 'newline': - case 'anchor': - case 'tag': - if (!it || it.value) - fc.items.push({ start: [this.sourceToken] }); - else if (it.sep) - it.sep.push(this.sourceToken); - else - it.start.push(this.sourceToken); - return; - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': { - const fs = this.flowScalar(this.type); - if (!it || it.value) - fc.items.push({ start: [], key: fs, sep: [] }); - else if (it.sep) - this.stack.push(fs); - else - Object.assign(it, { key: fs, sep: [] }); - return; - } - case 'flow-map-end': - case 'flow-seq-end': - fc.end.push(this.sourceToken); - return; - } - const bv = this.startBlockValue(fc); - /* istanbul ignore else should not happen */ - if (bv) - this.stack.push(bv); - else { - yield* this.pop(); - yield* this.step(); - } - } - else { - const parent = this.peek(2); - if (parent.type === 'block-map' && - ((this.type === 'map-value-ind' && parent.indent === fc.indent) || - (this.type === 'newline' && - !parent.items[parent.items.length - 1].sep))) { - yield* this.pop(); - yield* this.step(); - } - else if (this.type === 'map-value-ind' && - parent.type !== 'flow-collection') { - const prev = getPrevProps(parent); - const start = getFirstKeyStartProps(prev); - fixFlowSeqItems(fc); - const sep = fc.end.splice(1, fc.end.length); - sep.push(this.sourceToken); - const map = { - type: 'block-map', - offset: fc.offset, - indent: fc.indent, - items: [{ start, key: fc, sep }] - }; - this.onKeyLine = true; - this.stack[this.stack.length - 1] = map; - } - else { - yield* this.lineEnd(fc); - } - } - } - flowScalar(type) { - if (this.onNewLine) { - let nl = 
this.source.indexOf('\n') + 1; - while (nl !== 0) { - this.onNewLine(this.offset + nl); - nl = this.source.indexOf('\n', nl) + 1; - } - } - return { - type, - offset: this.offset, - indent: this.indent, - source: this.source - }; - } - startBlockValue(parent) { - switch (this.type) { - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - return this.flowScalar(this.type); - case 'block-scalar-header': - return { - type: 'block-scalar', - offset: this.offset, - indent: this.indent, - props: [this.sourceToken], - source: '' - }; - case 'flow-map-start': - case 'flow-seq-start': - return { - type: 'flow-collection', - offset: this.offset, - indent: this.indent, - start: this.sourceToken, - items: [], - end: [] - }; - case 'seq-item-ind': - return { - type: 'block-seq', - offset: this.offset, - indent: this.indent, - items: [{ start: [this.sourceToken] }] - }; - case 'explicit-key-ind': { - this.onKeyLine = true; - const prev = getPrevProps(parent); - const start = getFirstKeyStartProps(prev); - start.push(this.sourceToken); - return { - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start }] - }; - } - case 'map-value-ind': { - this.onKeyLine = true; - const prev = getPrevProps(parent); - const start = getFirstKeyStartProps(prev); - return { - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key: null, sep: [this.sourceToken] }] - }; - } - } - return null; - } - atIndentedComment(start, indent) { - if (this.type !== 'comment') - return false; - if (this.indent <= indent) - return false; - return start.every(st => st.type === 'newline' || st.type === 'space'); - } - *documentEnd(docEnd) { - if (this.type !== 'doc-mode') { - if (docEnd.end) - docEnd.end.push(this.sourceToken); - else - docEnd.end = [this.sourceToken]; - if (this.type === 'newline') - yield* this.pop(); - } - } - *lineEnd(token) { - switch (this.type) { - case 'comma': - case 'doc-start': - case 'doc-end': - case 'flow-seq-end': - case 'flow-map-end': - case 'map-value-ind': - yield* this.pop(); - yield* this.step(); - break; - case 'newline': - this.onKeyLine = false; - // fallthrough - case 'space': - case 'comment': - default: - // all other values are errors - if (token.end) - token.end.push(this.sourceToken); - else - token.end = [this.sourceToken]; - if (this.type === 'newline') - yield* this.pop(); - } - } -} - -export { Parser }; diff --git a/bin/node_modules/yaml/browser/dist/public-api.js b/bin/node_modules/yaml/browser/dist/public-api.js deleted file mode 100644 index b711aa0..0000000 --- a/bin/node_modules/yaml/browser/dist/public-api.js +++ /dev/null @@ -1,99 +0,0 @@ -import { Composer } from './compose/composer.js'; -import { Document } from './doc/Document.js'; -import { prettifyError, YAMLParseError } from './errors.js'; -import { warn } from './log.js'; -import { LineCounter } from './parse/line-counter.js'; -import { Parser } from './parse/parser.js'; - -function parseOptions(options) { - const prettyErrors = options.prettyErrors !== false; - const lineCounter = options.lineCounter || (prettyErrors && new LineCounter()) || null; - return { lineCounter, prettyErrors }; -} -/** - * Parse the input as a stream of YAML documents. - * - * Documents should be separated from each other by `...` or `---` marker lines. - * - * @returns If an empty `docs` array is returned, it will be of type - * EmptyStream and contain additional stream information. 
In - * TypeScript, you should use `'empty' in docs` as a type guard for it. - */ -function parseAllDocuments(source, options = {}) { - const { lineCounter, prettyErrors } = parseOptions(options); - const parser = new Parser(lineCounter?.addNewLine); - const composer = new Composer(options); - const docs = Array.from(composer.compose(parser.parse(source))); - if (prettyErrors && lineCounter) - for (const doc of docs) { - doc.errors.forEach(prettifyError(source, lineCounter)); - doc.warnings.forEach(prettifyError(source, lineCounter)); - } - if (docs.length > 0) - return docs; - return Object.assign([], { empty: true }, composer.streamInfo()); -} -/** Parse an input string into a single YAML.Document */ -function parseDocument(source, options = {}) { - const { lineCounter, prettyErrors } = parseOptions(options); - const parser = new Parser(lineCounter?.addNewLine); - const composer = new Composer(options); - // `doc` is always set by compose.end(true) at the very latest - let doc = null; - for (const _doc of composer.compose(parser.parse(source), true, source.length)) { - if (!doc) - doc = _doc; - else if (doc.options.logLevel !== 'silent') { - doc.errors.push(new YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()')); - break; - } - } - if (prettyErrors && lineCounter) { - doc.errors.forEach(prettifyError(source, lineCounter)); - doc.warnings.forEach(prettifyError(source, lineCounter)); - } - return doc; -} -function parse(src, reviver, options) { - let _reviver = undefined; - if (typeof reviver === 'function') { - _reviver = reviver; - } - else if (options === undefined && reviver && typeof reviver === 'object') { - options = reviver; - } - const doc = parseDocument(src, options); - if (!doc) - return null; - doc.warnings.forEach(warning => warn(doc.options.logLevel, warning)); - if (doc.errors.length > 0) { - if (doc.options.logLevel !== 'silent') - throw doc.errors[0]; - else - doc.errors = []; - } - return doc.toJS(Object.assign({ reviver: _reviver }, options)); -} -function stringify(value, replacer, options) { - let _replacer = null; - if (typeof replacer === 'function' || Array.isArray(replacer)) { - _replacer = replacer; - } - else if (options === undefined && replacer) { - options = replacer; - } - if (typeof options === 'string') - options = options.length; - if (typeof options === 'number') { - const indent = Math.round(options); - options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent }; - } - if (value === undefined) { - const { keepUndefined } = options ?? replacer ?? {}; - if (!keepUndefined) - return undefined; - } - return new Document(value, _replacer, options).toString(options); -} - -export { parse, parseAllDocuments, parseDocument, stringify }; diff --git a/bin/node_modules/yaml/browser/dist/schema/Schema.js b/bin/node_modules/yaml/browser/dist/schema/Schema.js deleted file mode 100644 index 6b68b8a..0000000 --- a/bin/node_modules/yaml/browser/dist/schema/Schema.js +++ /dev/null @@ -1,38 +0,0 @@ -import { MAP, SCALAR, SEQ } from '../nodes/identity.js'; -import { map } from './common/map.js'; -import { seq } from './common/seq.js'; -import { string } from './common/string.js'; -import { getTags, coreKnownTags } from './tags.js'; - -const sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0; -class Schema { - constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) { - this.compat = Array.isArray(compat) - ? 
getTags(compat, 'compat') - : compat - ? getTags(null, compat) - : null; - this.merge = !!merge; - this.name = (typeof schema === 'string' && schema) || 'core'; - this.knownTags = resolveKnownTags ? coreKnownTags : {}; - this.tags = getTags(customTags, this.name); - this.toStringOptions = toStringDefaults ?? null; - Object.defineProperty(this, MAP, { value: map }); - Object.defineProperty(this, SCALAR, { value: string }); - Object.defineProperty(this, SEQ, { value: seq }); - // Used by createMap() - this.sortMapEntries = - typeof sortMapEntries === 'function' - ? sortMapEntries - : sortMapEntries === true - ? sortMapEntriesByKey - : null; - } - clone() { - const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this)); - copy.tags = this.tags.slice(); - return copy; - } -} - -export { Schema }; diff --git a/bin/node_modules/yaml/browser/dist/schema/common/map.js b/bin/node_modules/yaml/browser/dist/schema/common/map.js deleted file mode 100644 index af97b78..0000000 --- a/bin/node_modules/yaml/browser/dist/schema/common/map.js +++ /dev/null @@ -1,17 +0,0 @@ -import { isMap } from '../../nodes/identity.js'; -import { YAMLMap } from '../../nodes/YAMLMap.js'; - -const map = { - collection: 'map', - default: true, - nodeClass: YAMLMap, - tag: 'tag:yaml.org,2002:map', - resolve(map, onError) { - if (!isMap(map)) - onError('Expected a mapping for this tag'); - return map; - }, - createNode: (schema, obj, ctx) => YAMLMap.from(schema, obj, ctx) -}; - -export { map }; diff --git a/bin/node_modules/yaml/browser/dist/schema/common/null.js b/bin/node_modules/yaml/browser/dist/schema/common/null.js deleted file mode 100644 index fcbe1b7..0000000 --- a/bin/node_modules/yaml/browser/dist/schema/common/null.js +++ /dev/null @@ -1,15 +0,0 @@ -import { Scalar } from '../../nodes/Scalar.js'; - -const nullTag = { - identify: value => value == null, - createNode: () => new Scalar(null), - default: true, - tag: 'tag:yaml.org,2002:null', - test: /^(?:~|[Nn]ull|NULL)?$/, - resolve: () => new Scalar(null), - stringify: ({ source }, ctx) => typeof source === 'string' && nullTag.test.test(source) - ? 
source - : ctx.options.nullStr -}; - -export { nullTag }; diff --git a/bin/node_modules/yaml/browser/dist/schema/common/seq.js b/bin/node_modules/yaml/browser/dist/schema/common/seq.js deleted file mode 100644 index 1915b60..0000000 --- a/bin/node_modules/yaml/browser/dist/schema/common/seq.js +++ /dev/null @@ -1,17 +0,0 @@ -import { isSeq } from '../../nodes/identity.js'; -import { YAMLSeq } from '../../nodes/YAMLSeq.js'; - -const seq = { - collection: 'seq', - default: true, - nodeClass: YAMLSeq, - tag: 'tag:yaml.org,2002:seq', - resolve(seq, onError) { - if (!isSeq(seq)) - onError('Expected a sequence for this tag'); - return seq; - }, - createNode: (schema, obj, ctx) => YAMLSeq.from(schema, obj, ctx) -}; - -export { seq }; diff --git a/bin/node_modules/yaml/browser/dist/schema/common/string.js b/bin/node_modules/yaml/browser/dist/schema/common/string.js deleted file mode 100644 index a064f7b..0000000 --- a/bin/node_modules/yaml/browser/dist/schema/common/string.js +++ /dev/null @@ -1,14 +0,0 @@ -import { stringifyString } from '../../stringify/stringifyString.js'; - -const string = { - identify: value => typeof value === 'string', - default: true, - tag: 'tag:yaml.org,2002:str', - resolve: str => str, - stringify(item, ctx, onComment, onChompKeep) { - ctx = Object.assign({ actualString: true }, ctx); - return stringifyString(item, ctx, onComment, onChompKeep); - } -}; - -export { string }; diff --git a/bin/node_modules/yaml/browser/dist/schema/core/bool.js b/bin/node_modules/yaml/browser/dist/schema/core/bool.js deleted file mode 100644 index ab3c943..0000000 --- a/bin/node_modules/yaml/browser/dist/schema/core/bool.js +++ /dev/null @@ -1,19 +0,0 @@ -import { Scalar } from '../../nodes/Scalar.js'; - -const boolTag = { - identify: value => typeof value === 'boolean', - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/, - resolve: str => new Scalar(str[0] === 't' || str[0] === 'T'), - stringify({ source, value }, ctx) { - if (source && boolTag.test.test(source)) { - const sv = source[0] === 't' || source[0] === 'T'; - if (value === sv) - return source; - } - return value ? ctx.options.trueStr : ctx.options.falseStr; - } -}; - -export { boolTag }; diff --git a/bin/node_modules/yaml/browser/dist/schema/core/float.js b/bin/node_modules/yaml/browser/dist/schema/core/float.js deleted file mode 100644 index 3fa9cf8..0000000 --- a/bin/node_modules/yaml/browser/dist/schema/core/float.js +++ /dev/null @@ -1,43 +0,0 @@ -import { Scalar } from '../../nodes/Scalar.js'; -import { stringifyNumber } from '../../stringify/stringifyNumber.js'; - -const floatNaN = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^(?:[-+]?\.(?:inf|Inf|INF)|\.nan|\.NaN|\.NAN)$/, - resolve: str => str.slice(-3).toLowerCase() === 'nan' - ? NaN - : str[0] === '-' - ? Number.NEGATIVE_INFINITY - : Number.POSITIVE_INFINITY, - stringify: stringifyNumber -}; -const floatExp = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - format: 'EXP', - test: /^[-+]?(?:\.[0-9]+|[0-9]+(?:\.[0-9]*)?)[eE][-+]?[0-9]+$/, - resolve: str => parseFloat(str), - stringify(node) { - const num = Number(node.value); - return isFinite(num) ? 
num.toExponential() : stringifyNumber(node); - } -}; -const float = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^[-+]?(?:\.[0-9]+|[0-9]+\.[0-9]*)$/, - resolve(str) { - const node = new Scalar(parseFloat(str)); - const dot = str.indexOf('.'); - if (dot !== -1 && str[str.length - 1] === '0') - node.minFractionDigits = str.length - dot - 1; - return node; - }, - stringify: stringifyNumber -}; - -export { float, floatExp, floatNaN }; diff --git a/bin/node_modules/yaml/browser/dist/schema/core/int.js b/bin/node_modules/yaml/browser/dist/schema/core/int.js deleted file mode 100644 index 7091235..0000000 --- a/bin/node_modules/yaml/browser/dist/schema/core/int.js +++ /dev/null @@ -1,38 +0,0 @@ -import { stringifyNumber } from '../../stringify/stringifyNumber.js'; - -const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value); -const intResolve = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? BigInt(str) : parseInt(str.substring(offset), radix)); -function intStringify(node, radix, prefix) { - const { value } = node; - if (intIdentify(value) && value >= 0) - return prefix + value.toString(radix); - return stringifyNumber(node); -} -const intOct = { - identify: value => intIdentify(value) && value >= 0, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'OCT', - test: /^0o[0-7]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 8, opt), - stringify: node => intStringify(node, 8, '0o') -}; -const int = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - test: /^[-+]?[0-9]+$/, - resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt), - stringify: stringifyNumber -}; -const intHex = { - identify: value => intIdentify(value) && value >= 0, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'HEX', - test: /^0x[0-9a-fA-F]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt), - stringify: node => intStringify(node, 16, '0x') -}; - -export { int, intHex, intOct }; diff --git a/bin/node_modules/yaml/browser/dist/schema/core/schema.js b/bin/node_modules/yaml/browser/dist/schema/core/schema.js deleted file mode 100644 index dd02b2e..0000000 --- a/bin/node_modules/yaml/browser/dist/schema/core/schema.js +++ /dev/null @@ -1,23 +0,0 @@ -import { map } from '../common/map.js'; -import { nullTag } from '../common/null.js'; -import { seq } from '../common/seq.js'; -import { string } from '../common/string.js'; -import { boolTag } from './bool.js'; -import { floatNaN, floatExp, float } from './float.js'; -import { intOct, int, intHex } from './int.js'; - -const schema = [ - map, - seq, - string, - nullTag, - boolTag, - intOct, - int, - intHex, - floatNaN, - floatExp, - float -]; - -export { schema }; diff --git a/bin/node_modules/yaml/browser/dist/schema/json/schema.js b/bin/node_modules/yaml/browser/dist/schema/json/schema.js deleted file mode 100644 index 16d75ce..0000000 --- a/bin/node_modules/yaml/browser/dist/schema/json/schema.js +++ /dev/null @@ -1,62 +0,0 @@ -import { Scalar } from '../../nodes/Scalar.js'; -import { map } from '../common/map.js'; -import { seq } from '../common/seq.js'; - -function intIdentify(value) { - return typeof value === 'bigint' || Number.isInteger(value); -} -const stringifyJSON = ({ value }) => JSON.stringify(value); -const jsonScalars = [ - { - identify: value => typeof value === 'string', - default: true, - tag: 'tag:yaml.org,2002:str', - resolve: str => str, - stringify: stringifyJSON - }, - { - identify: value => 
value == null, - createNode: () => new Scalar(null), - default: true, - tag: 'tag:yaml.org,2002:null', - test: /^null$/, - resolve: () => null, - stringify: stringifyJSON - }, - { - identify: value => typeof value === 'boolean', - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^true|false$/, - resolve: str => str === 'true', - stringify: stringifyJSON - }, - { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - test: /^-?(?:0|[1-9][0-9]*)$/, - resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10), - stringify: ({ value }) => intIdentify(value) ? value.toString() : JSON.stringify(value) - }, - { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^-?(?:0|[1-9][0-9]*)(?:\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/, - resolve: str => parseFloat(str), - stringify: stringifyJSON - } -]; -const jsonError = { - default: true, - tag: '', - test: /^/, - resolve(str, onError) { - onError(`Unresolved plain scalar ${JSON.stringify(str)}`); - return str; - } -}; -const schema = [map, seq].concat(jsonScalars, jsonError); - -export { schema }; diff --git a/bin/node_modules/yaml/browser/dist/schema/tags.js b/bin/node_modules/yaml/browser/dist/schema/tags.js deleted file mode 100644 index 84e23fc..0000000 --- a/bin/node_modules/yaml/browser/dist/schema/tags.js +++ /dev/null @@ -1,83 +0,0 @@ -import { map } from './common/map.js'; -import { nullTag } from './common/null.js'; -import { seq } from './common/seq.js'; -import { string } from './common/string.js'; -import { boolTag } from './core/bool.js'; -import { float, floatExp, floatNaN } from './core/float.js'; -import { int, intHex, intOct } from './core/int.js'; -import { schema } from './core/schema.js'; -import { schema as schema$1 } from './json/schema.js'; -import { binary } from './yaml-1.1/binary.js'; -import { omap } from './yaml-1.1/omap.js'; -import { pairs } from './yaml-1.1/pairs.js'; -import { schema as schema$2 } from './yaml-1.1/schema.js'; -import { set } from './yaml-1.1/set.js'; -import { timestamp, floatTime, intTime } from './yaml-1.1/timestamp.js'; - -const schemas = new Map([ - ['core', schema], - ['failsafe', [map, seq, string]], - ['json', schema$1], - ['yaml11', schema$2], - ['yaml-1.1', schema$2] -]); -const tagsByName = { - binary, - bool: boolTag, - float, - floatExp, - floatNaN, - floatTime, - int, - intHex, - intOct, - intTime, - map, - null: nullTag, - omap, - pairs, - seq, - set, - timestamp -}; -const coreKnownTags = { - 'tag:yaml.org,2002:binary': binary, - 'tag:yaml.org,2002:omap': omap, - 'tag:yaml.org,2002:pairs': pairs, - 'tag:yaml.org,2002:set': set, - 'tag:yaml.org,2002:timestamp': timestamp -}; -function getTags(customTags, schemaName) { - let tags = schemas.get(schemaName); - if (!tags) { - if (Array.isArray(customTags)) - tags = []; - else { - const keys = Array.from(schemas.keys()) - .filter(key => key !== 'yaml11') - .map(key => JSON.stringify(key)) - .join(', '); - throw new Error(`Unknown schema "${schemaName}"; use one of ${keys} or define customTags array`); - } - } - if (Array.isArray(customTags)) { - for (const tag of customTags) - tags = tags.concat(tag); - } - else if (typeof customTags === 'function') { - tags = customTags(tags.slice()); - } - return tags.map(tag => { - if (typeof tag !== 'string') - return tag; - const tagObj = tagsByName[tag]; - if (tagObj) - return tagObj; - const keys = Object.keys(tagsByName) - .map(key => JSON.stringify(key)) - .join(', '); - throw new Error(`Unknown 
custom tag "${tag}"; use one of ${keys}`); - }); -} - -export { coreKnownTags, getTags }; diff --git a/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/binary.js b/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/binary.js deleted file mode 100644 index 8021a52..0000000 --- a/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/binary.js +++ /dev/null @@ -1,66 +0,0 @@ -import { Scalar } from '../../nodes/Scalar.js'; -import { stringifyString } from '../../stringify/stringifyString.js'; - -const binary = { - identify: value => value instanceof Uint8Array, // Buffer inherits from Uint8Array - default: false, - tag: 'tag:yaml.org,2002:binary', - /** - * Returns a Buffer in node and an Uint8Array in browsers - * - * To use the resulting buffer as an image, you'll want to do something like: - * - * const blob = new Blob([buffer], { type: 'image/jpeg' }) - * document.querySelector('#photo').src = URL.createObjectURL(blob) - */ - resolve(src, onError) { - if (typeof Buffer === 'function') { - return Buffer.from(src, 'base64'); - } - else if (typeof atob === 'function') { - // On IE 11, atob() can't handle newlines - const str = atob(src.replace(/[\n\r]/g, '')); - const buffer = new Uint8Array(str.length); - for (let i = 0; i < str.length; ++i) - buffer[i] = str.charCodeAt(i); - return buffer; - } - else { - onError('This environment does not support reading binary tags; either Buffer or atob is required'); - return src; - } - }, - stringify({ comment, type, value }, ctx, onComment, onChompKeep) { - const buf = value; // checked earlier by binary.identify() - let str; - if (typeof Buffer === 'function') { - str = - buf instanceof Buffer - ? buf.toString('base64') - : Buffer.from(buf.buffer).toString('base64'); - } - else if (typeof btoa === 'function') { - let s = ''; - for (let i = 0; i < buf.length; ++i) - s += String.fromCharCode(buf[i]); - str = btoa(s); - } - else { - throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required'); - } - if (!type) - type = Scalar.BLOCK_LITERAL; - if (type !== Scalar.QUOTE_DOUBLE) { - const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth); - const n = Math.ceil(str.length / lineWidth); - const lines = new Array(n); - for (let i = 0, o = 0; i < n; ++i, o += lineWidth) { - lines[i] = str.substr(o, lineWidth); - } - str = lines.join(type === Scalar.BLOCK_LITERAL ? '\n' : ' '); - } - return stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep); - } -}; - -export { binary }; diff --git a/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/bool.js b/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/bool.js deleted file mode 100644 index 999b59d..0000000 --- a/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/bool.js +++ /dev/null @@ -1,26 +0,0 @@ -import { Scalar } from '../../nodes/Scalar.js'; - -function boolStringify({ value, source }, ctx) { - const boolObj = value ? trueTag : falseTag; - if (source && boolObj.test.test(source)) - return source; - return value ? 
ctx.options.trueStr : ctx.options.falseStr; -} -const trueTag = { - identify: value => value === true, - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/, - resolve: () => new Scalar(true), - stringify: boolStringify -}; -const falseTag = { - identify: value => value === false, - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/, - resolve: () => new Scalar(false), - stringify: boolStringify -}; - -export { falseTag, trueTag }; diff --git a/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/float.js b/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/float.js deleted file mode 100644 index 2f06117..0000000 --- a/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/float.js +++ /dev/null @@ -1,46 +0,0 @@ -import { Scalar } from '../../nodes/Scalar.js'; -import { stringifyNumber } from '../../stringify/stringifyNumber.js'; - -const floatNaN = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^(?:[-+]?\.(?:inf|Inf|INF)|\.nan|\.NaN|\.NAN)$/, - resolve: (str) => str.slice(-3).toLowerCase() === 'nan' - ? NaN - : str[0] === '-' - ? Number.NEGATIVE_INFINITY - : Number.POSITIVE_INFINITY, - stringify: stringifyNumber -}; -const floatExp = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - format: 'EXP', - test: /^[-+]?(?:[0-9][0-9_]*)?(?:\.[0-9_]*)?[eE][-+]?[0-9]+$/, - resolve: (str) => parseFloat(str.replace(/_/g, '')), - stringify(node) { - const num = Number(node.value); - return isFinite(num) ? num.toExponential() : stringifyNumber(node); - } -}; -const float = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^[-+]?(?:[0-9][0-9_]*)?\.[0-9_]*$/, - resolve(str) { - const node = new Scalar(parseFloat(str.replace(/_/g, ''))); - const dot = str.indexOf('.'); - if (dot !== -1) { - const f = str.substring(dot + 1).replace(/_/g, ''); - if (f[f.length - 1] === '0') - node.minFractionDigits = f.length; - } - return node; - }, - stringify: stringifyNumber -}; - -export { float, floatExp, floatNaN }; diff --git a/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/int.js b/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/int.js deleted file mode 100644 index f572823..0000000 --- a/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/int.js +++ /dev/null @@ -1,71 +0,0 @@ -import { stringifyNumber } from '../../stringify/stringifyNumber.js'; - -const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value); -function intResolve(str, offset, radix, { intAsBigInt }) { - const sign = str[0]; - if (sign === '-' || sign === '+') - offset += 1; - str = str.substring(offset).replace(/_/g, ''); - if (intAsBigInt) { - switch (radix) { - case 2: - str = `0b${str}`; - break; - case 8: - str = `0o${str}`; - break; - case 16: - str = `0x${str}`; - break; - } - const n = BigInt(str); - return sign === '-' ? BigInt(-1) * n : n; - } - const n = parseInt(str, radix); - return sign === '-' ? -1 * n : n; -} -function intStringify(node, radix, prefix) { - const { value } = node; - if (intIdentify(value)) { - const str = value.toString(radix); - return value < 0 ? 
'-' + prefix + str.substr(1) : prefix + str; - } - return stringifyNumber(node); -} -const intBin = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'BIN', - test: /^[-+]?0b[0-1_]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt), - stringify: node => intStringify(node, 2, '0b') -}; -const intOct = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'OCT', - test: /^[-+]?0[0-7_]+$/, - resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt), - stringify: node => intStringify(node, 8, '0') -}; -const int = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - test: /^[-+]?[0-9][0-9_]*$/, - resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt), - stringify: stringifyNumber -}; -const intHex = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'HEX', - test: /^[-+]?0x[0-9a-fA-F_]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt), - stringify: node => intStringify(node, 16, '0x') -}; - -export { int, intBin, intHex, intOct }; diff --git a/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/omap.js b/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/omap.js deleted file mode 100644 index 5574ac5..0000000 --- a/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/omap.js +++ /dev/null @@ -1,74 +0,0 @@ -import { isScalar, isPair } from '../../nodes/identity.js'; -import { toJS } from '../../nodes/toJS.js'; -import { YAMLMap } from '../../nodes/YAMLMap.js'; -import { YAMLSeq } from '../../nodes/YAMLSeq.js'; -import { resolvePairs, createPairs } from './pairs.js'; - -class YAMLOMap extends YAMLSeq { - constructor() { - super(); - this.add = YAMLMap.prototype.add.bind(this); - this.delete = YAMLMap.prototype.delete.bind(this); - this.get = YAMLMap.prototype.get.bind(this); - this.has = YAMLMap.prototype.has.bind(this); - this.set = YAMLMap.prototype.set.bind(this); - this.tag = YAMLOMap.tag; - } - /** - * If `ctx` is given, the return type is actually `Map`, - * but TypeScript won't allow widening the signature of a child method. 
- */ - toJSON(_, ctx) { - if (!ctx) - return super.toJSON(_); - const map = new Map(); - if (ctx?.onCreate) - ctx.onCreate(map); - for (const pair of this.items) { - let key, value; - if (isPair(pair)) { - key = toJS(pair.key, '', ctx); - value = toJS(pair.value, key, ctx); - } - else { - key = toJS(pair, '', ctx); - } - if (map.has(key)) - throw new Error('Ordered maps must not include duplicate keys'); - map.set(key, value); - } - return map; - } - static from(schema, iterable, ctx) { - const pairs = createPairs(schema, iterable, ctx); - const omap = new this(); - omap.items = pairs.items; - return omap; - } -} -YAMLOMap.tag = 'tag:yaml.org,2002:omap'; -const omap = { - collection: 'seq', - identify: value => value instanceof Map, - nodeClass: YAMLOMap, - default: false, - tag: 'tag:yaml.org,2002:omap', - resolve(seq, onError) { - const pairs = resolvePairs(seq, onError); - const seenKeys = []; - for (const { key } of pairs.items) { - if (isScalar(key)) { - if (seenKeys.includes(key.value)) { - onError(`Ordered maps must not include duplicate keys: ${key.value}`); - } - else { - seenKeys.push(key.value); - } - } - } - return Object.assign(new YAMLOMap(), pairs); - }, - createNode: (schema, iterable, ctx) => YAMLOMap.from(schema, iterable, ctx) -}; - -export { YAMLOMap, omap }; diff --git a/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/pairs.js b/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/pairs.js deleted file mode 100644 index 18e3020..0000000 --- a/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/pairs.js +++ /dev/null @@ -1,78 +0,0 @@ -import { isSeq, isPair, isMap } from '../../nodes/identity.js'; -import { Pair, createPair } from '../../nodes/Pair.js'; -import { Scalar } from '../../nodes/Scalar.js'; -import { YAMLSeq } from '../../nodes/YAMLSeq.js'; - -function resolvePairs(seq, onError) { - if (isSeq(seq)) { - for (let i = 0; i < seq.items.length; ++i) { - let item = seq.items[i]; - if (isPair(item)) - continue; - else if (isMap(item)) { - if (item.items.length > 1) - onError('Each pair must have its own sequence indicator'); - const pair = item.items[0] || new Pair(new Scalar(null)); - if (item.commentBefore) - pair.key.commentBefore = pair.key.commentBefore - ? `${item.commentBefore}\n${pair.key.commentBefore}` - : item.commentBefore; - if (item.comment) { - const cn = pair.value ?? pair.key; - cn.comment = cn.comment - ? `${item.comment}\n${cn.comment}` - : item.comment; - } - item = pair; - } - seq.items[i] = isPair(item) ? 
item : new Pair(item); - } - } - else - onError('Expected a sequence for this tag'); - return seq; -} -function createPairs(schema, iterable, ctx) { - const { replacer } = ctx; - const pairs = new YAMLSeq(schema); - pairs.tag = 'tag:yaml.org,2002:pairs'; - let i = 0; - if (iterable && Symbol.iterator in Object(iterable)) - for (let it of iterable) { - if (typeof replacer === 'function') - it = replacer.call(iterable, String(i++), it); - let key, value; - if (Array.isArray(it)) { - if (it.length === 2) { - key = it[0]; - value = it[1]; - } - else - throw new TypeError(`Expected [key, value] tuple: ${it}`); - } - else if (it && it instanceof Object) { - const keys = Object.keys(it); - if (keys.length === 1) { - key = keys[0]; - value = it[key]; - } - else { - throw new TypeError(`Expected tuple with one key, not ${keys.length} keys`); - } - } - else { - key = it; - } - pairs.items.push(createPair(key, value, ctx)); - } - return pairs; -} -const pairs = { - collection: 'seq', - default: false, - tag: 'tag:yaml.org,2002:pairs', - resolve: resolvePairs, - createNode: createPairs -}; - -export { createPairs, pairs, resolvePairs }; diff --git a/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/schema.js b/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/schema.js deleted file mode 100644 index dc5be5f..0000000 --- a/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/schema.js +++ /dev/null @@ -1,37 +0,0 @@ -import { map } from '../common/map.js'; -import { nullTag } from '../common/null.js'; -import { seq } from '../common/seq.js'; -import { string } from '../common/string.js'; -import { binary } from './binary.js'; -import { trueTag, falseTag } from './bool.js'; -import { floatNaN, floatExp, float } from './float.js'; -import { intBin, intOct, int, intHex } from './int.js'; -import { omap } from './omap.js'; -import { pairs } from './pairs.js'; -import { set } from './set.js'; -import { intTime, floatTime, timestamp } from './timestamp.js'; - -const schema = [ - map, - seq, - string, - nullTag, - trueTag, - falseTag, - intBin, - intOct, - int, - intHex, - floatNaN, - floatExp, - float, - binary, - omap, - pairs, - set, - intTime, - floatTime, - timestamp -]; - -export { schema }; diff --git a/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/set.js b/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/set.js deleted file mode 100644 index a3cf4ec..0000000 --- a/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/set.js +++ /dev/null @@ -1,93 +0,0 @@ -import { isMap, isPair, isScalar } from '../../nodes/identity.js'; -import { Pair, createPair } from '../../nodes/Pair.js'; -import { YAMLMap, findPair } from '../../nodes/YAMLMap.js'; - -class YAMLSet extends YAMLMap { - constructor(schema) { - super(schema); - this.tag = YAMLSet.tag; - } - add(key) { - let pair; - if (isPair(key)) - pair = key; - else if (key && - typeof key === 'object' && - 'key' in key && - 'value' in key && - key.value === null) - pair = new Pair(key.key, null); - else - pair = new Pair(key, null); - const prev = findPair(this.items, pair.key); - if (!prev) - this.items.push(pair); - } - /** - * If `keepPair` is `true`, returns the Pair matching `key`. - * Otherwise, returns the value of that Pair's key. - */ - get(key, keepPair) { - const pair = findPair(this.items, key); - return !keepPair && isPair(pair) - ? isScalar(pair.key) - ? 
pair.key.value - : pair.key - : pair; - } - set(key, value) { - if (typeof value !== 'boolean') - throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`); - const prev = findPair(this.items, key); - if (prev && !value) { - this.items.splice(this.items.indexOf(prev), 1); - } - else if (!prev && value) { - this.items.push(new Pair(key)); - } - } - toJSON(_, ctx) { - return super.toJSON(_, ctx, Set); - } - toString(ctx, onComment, onChompKeep) { - if (!ctx) - return JSON.stringify(this); - if (this.hasAllNullValues(true)) - return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep); - else - throw new Error('Set items must all have null values'); - } - static from(schema, iterable, ctx) { - const { replacer } = ctx; - const set = new this(schema); - if (iterable && Symbol.iterator in Object(iterable)) - for (let value of iterable) { - if (typeof replacer === 'function') - value = replacer.call(iterable, value, value); - set.items.push(createPair(value, null, ctx)); - } - return set; - } -} -YAMLSet.tag = 'tag:yaml.org,2002:set'; -const set = { - collection: 'map', - identify: value => value instanceof Set, - nodeClass: YAMLSet, - default: false, - tag: 'tag:yaml.org,2002:set', - createNode: (schema, iterable, ctx) => YAMLSet.from(schema, iterable, ctx), - resolve(map, onError) { - if (isMap(map)) { - if (map.hasAllNullValues(true)) - return Object.assign(new YAMLSet(), map); - else - onError('Set items must all have null values'); - } - else - onError('Expected a mapping for this tag'); - return map; - } -}; - -export { YAMLSet, set }; diff --git a/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/timestamp.js b/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/timestamp.js deleted file mode 100644 index 58986cd..0000000 --- a/bin/node_modules/yaml/browser/dist/schema/yaml-1.1/timestamp.js +++ /dev/null @@ -1,101 +0,0 @@ -import { stringifyNumber } from '../../stringify/stringifyNumber.js'; - -/** Internal types handle bigint as number, because TS can't figure it out. */ -function parseSexagesimal(str, asBigInt) { - const sign = str[0]; - const parts = sign === '-' || sign === '+' ? str.substring(1) : str; - const num = (n) => asBigInt ? BigInt(n) : Number(n); - const res = parts - .replace(/_/g, '') - .split(':') - .reduce((res, p) => res * num(60) + num(p), num(0)); - return (sign === '-' ? num(-1) * res : res); -} -/** - * hhhh:mm:ss.sss - * - * Internal types handle bigint as number, because TS can't figure it out. 
- */ -function stringifySexagesimal(node) { - let { value } = node; - let num = (n) => n; - if (typeof value === 'bigint') - num = n => BigInt(n); - else if (isNaN(value) || !isFinite(value)) - return stringifyNumber(node); - let sign = ''; - if (value < 0) { - sign = '-'; - value *= num(-1); - } - const _60 = num(60); - const parts = [value % _60]; // seconds, including ms - if (value < 60) { - parts.unshift(0); // at least one : is required - } - else { - value = (value - parts[0]) / _60; - parts.unshift(value % _60); // minutes - if (value >= 60) { - value = (value - parts[0]) / _60; - parts.unshift(value); // hours - } - } - return (sign + - parts - .map(n => String(n).padStart(2, '0')) - .join(':') - .replace(/000000\d*$/, '') // % 60 may introduce error - ); -} -const intTime = { - identify: value => typeof value === 'bigint' || Number.isInteger(value), - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'TIME', - test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/, - resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt), - stringify: stringifySexagesimal -}; -const floatTime = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - format: 'TIME', - test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*$/, - resolve: str => parseSexagesimal(str, false), - stringify: stringifySexagesimal -}; -const timestamp = { - identify: value => value instanceof Date, - default: true, - tag: 'tag:yaml.org,2002:timestamp', - // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part - // may be omitted altogether, resulting in a date format. In such a case, the time part is - // assumed to be 00:00:00Z (start of day, UTC). - test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd - '(?:' + // time is optional - '(?:t|T|[ \\t]+)' + // t | T | whitespace - '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)? - '(?:[ \\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30 - ')?$'), - resolve(str) { - const match = str.match(timestamp.test); - if (!match) - throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd'); - const [, year, month, day, hour, minute, second] = match.map(Number); - const millisec = match[7] ? Number((match[7] + '00').substr(1, 3)) : 0; - let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec); - const tz = match[8]; - if (tz && tz !== 'Z') { - let d = parseSexagesimal(tz, false); - if (Math.abs(d) < 30) - d *= 60; - date -= 60000 * d; - } - return new Date(date); - }, - stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\.000Z$/, '') -}; - -export { floatTime, intTime, timestamp }; diff --git a/bin/node_modules/yaml/browser/dist/stringify/foldFlowLines.js b/bin/node_modules/yaml/browser/dist/stringify/foldFlowLines.js deleted file mode 100644 index 2a55426..0000000 --- a/bin/node_modules/yaml/browser/dist/stringify/foldFlowLines.js +++ /dev/null @@ -1,144 +0,0 @@ -const FOLD_FLOW = 'flow'; -const FOLD_BLOCK = 'block'; -const FOLD_QUOTED = 'quoted'; -/** - * Tries to keep input at up to `lineWidth` characters, splitting only on spaces - * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are - * terminated with `\n` and started with `indent`. 
- */ -function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) { - if (!lineWidth || lineWidth < 0) - return text; - const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length); - if (text.length <= endStep) - return text; - const folds = []; - const escapedFolds = {}; - let end = lineWidth - indent.length; - if (typeof indentAtStart === 'number') { - if (indentAtStart > lineWidth - Math.max(2, minContentWidth)) - folds.push(0); - else - end = lineWidth - indentAtStart; - } - let split = undefined; - let prev = undefined; - let overflow = false; - let i = -1; - let escStart = -1; - let escEnd = -1; - if (mode === FOLD_BLOCK) { - i = consumeMoreIndentedLines(text, i, indent.length); - if (i !== -1) - end = i + endStep; - } - for (let ch; (ch = text[(i += 1)]);) { - if (mode === FOLD_QUOTED && ch === '\\') { - escStart = i; - switch (text[i + 1]) { - case 'x': - i += 3; - break; - case 'u': - i += 5; - break; - case 'U': - i += 9; - break; - default: - i += 1; - } - escEnd = i; - } - if (ch === '\n') { - if (mode === FOLD_BLOCK) - i = consumeMoreIndentedLines(text, i, indent.length); - end = i + indent.length + endStep; - split = undefined; - } - else { - if (ch === ' ' && - prev && - prev !== ' ' && - prev !== '\n' && - prev !== '\t') { - // space surrounded by non-space can be replaced with newline + indent - const next = text[i + 1]; - if (next && next !== ' ' && next !== '\n' && next !== '\t') - split = i; - } - if (i >= end) { - if (split) { - folds.push(split); - end = split + endStep; - split = undefined; - } - else if (mode === FOLD_QUOTED) { - // white-space collected at end may stretch past lineWidth - while (prev === ' ' || prev === '\t') { - prev = ch; - ch = text[(i += 1)]; - overflow = true; - } - // Account for newline escape, but don't break preceding escape - const j = i > escEnd + 1 ? 
i - 2 : escStart - 1; - // Bail out if lineWidth & minContentWidth are shorter than an escape string - if (escapedFolds[j]) - return text; - folds.push(j); - escapedFolds[j] = true; - end = j + endStep; - split = undefined; - } - else { - overflow = true; - } - } - } - prev = ch; - } - if (overflow && onOverflow) - onOverflow(); - if (folds.length === 0) - return text; - if (onFold) - onFold(); - let res = text.slice(0, folds[0]); - for (let i = 0; i < folds.length; ++i) { - const fold = folds[i]; - const end = folds[i + 1] || text.length; - if (fold === 0) - res = `\n${indent}${text.slice(0, end)}`; - else { - if (mode === FOLD_QUOTED && escapedFolds[fold]) - res += `${text[fold]}\\`; - res += `\n${indent}${text.slice(fold + 1, end)}`; - } - } - return res; -} -/** - * Presumes `i + 1` is at the start of a line - * @returns index of last newline in more-indented block - */ -function consumeMoreIndentedLines(text, i, indent) { - let end = i; - let start = i + 1; - let ch = text[start]; - while (ch === ' ' || ch === '\t') { - if (i < start + indent) { - ch = text[++i]; - } - else { - do { - ch = text[++i]; - } while (ch && ch !== '\n'); - end = i; - start = i + 1; - ch = text[start]; - } - } - return end; -} - -export { FOLD_BLOCK, FOLD_FLOW, FOLD_QUOTED, foldFlowLines }; diff --git a/bin/node_modules/yaml/browser/dist/stringify/stringify.js b/bin/node_modules/yaml/browser/dist/stringify/stringify.js deleted file mode 100644 index 760ec4c..0000000 --- a/bin/node_modules/yaml/browser/dist/stringify/stringify.js +++ /dev/null @@ -1,124 +0,0 @@ -import { anchorIsValid } from '../doc/anchors.js'; -import { isPair, isAlias, isNode, isScalar, isCollection } from '../nodes/identity.js'; -import { stringifyComment } from './stringifyComment.js'; -import { stringifyString } from './stringifyString.js'; - -function createStringifyContext(doc, options) { - const opt = Object.assign({ - blockQuote: true, - commentString: stringifyComment, - defaultKeyType: null, - defaultStringType: 'PLAIN', - directives: null, - doubleQuotedAsJSON: false, - doubleQuotedMinMultiLineLength: 40, - falseStr: 'false', - flowCollectionPadding: true, - indentSeq: true, - lineWidth: 80, - minContentWidth: 20, - nullStr: 'null', - simpleKeys: false, - singleQuote: null, - trueStr: 'true', - verifyAliasOrder: true - }, doc.schema.toStringOptions, options); - let inFlow; - switch (opt.collectionStyle) { - case 'block': - inFlow = false; - break; - case 'flow': - inFlow = true; - break; - default: - inFlow = null; - } - return { - anchors: new Set(), - doc, - flowCollectionPadding: opt.flowCollectionPadding ? ' ' : '', - indent: '', - indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ', - inFlow, - options: opt - }; -} -function getTagObject(tags, item) { - if (item.tag) { - const match = tags.filter(t => t.tag === item.tag); - if (match.length > 0) - return match.find(t => t.format === item.format) ?? match[0]; - } - let tagObj = undefined; - let obj; - if (isScalar(item)) { - obj = item.value; - const match = tags.filter(t => t.identify?.(obj)); - tagObj = - match.find(t => t.format === item.format) ?? match.find(t => !t.format); - } - else { - obj = item; - tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass); - } - if (!tagObj) { - const name = obj?.constructor?.name ?? 
typeof obj; - throw new Error(`Tag not resolved for ${name} value`); - } - return tagObj; -} -// needs to be called before value stringifier to allow for circular anchor refs -function stringifyProps(node, tagObj, { anchors, doc }) { - if (!doc.directives) - return ''; - const props = []; - const anchor = (isScalar(node) || isCollection(node)) && node.anchor; - if (anchor && anchorIsValid(anchor)) { - anchors.add(anchor); - props.push(`&${anchor}`); - } - const tag = node.tag ? node.tag : tagObj.default ? null : tagObj.tag; - if (tag) - props.push(doc.directives.tagString(tag)); - return props.join(' '); -} -function stringify(item, ctx, onComment, onChompKeep) { - if (isPair(item)) - return item.toString(ctx, onComment, onChompKeep); - if (isAlias(item)) { - if (ctx.doc.directives) - return item.toString(ctx); - if (ctx.resolvedAliases?.has(item)) { - throw new TypeError(`Cannot stringify circular structure without alias nodes`); - } - else { - if (ctx.resolvedAliases) - ctx.resolvedAliases.add(item); - else - ctx.resolvedAliases = new Set([item]); - item = item.resolve(ctx.doc); - } - } - let tagObj = undefined; - const node = isNode(item) - ? item - : ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) }); - if (!tagObj) - tagObj = getTagObject(ctx.doc.schema.tags, node); - const props = stringifyProps(node, tagObj, ctx); - if (props.length > 0) - ctx.indentAtStart = (ctx.indentAtStart ?? 0) + props.length + 1; - const str = typeof tagObj.stringify === 'function' - ? tagObj.stringify(node, ctx, onComment, onChompKeep) - : isScalar(node) - ? stringifyString(node, ctx, onComment, onChompKeep) - : node.toString(ctx, onComment, onChompKeep); - if (!props) - return str; - return isScalar(node) || str[0] === '{' || str[0] === '[' - ? `${props} ${str}` - : `${props}\n${ctx.indent}${str}`; -} - -export { createStringifyContext, stringify }; diff --git a/bin/node_modules/yaml/browser/dist/stringify/stringifyCollection.js b/bin/node_modules/yaml/browser/dist/stringify/stringifyCollection.js deleted file mode 100644 index 9019400..0000000 --- a/bin/node_modules/yaml/browser/dist/stringify/stringifyCollection.js +++ /dev/null @@ -1,143 +0,0 @@ -import { isNode, isPair } from '../nodes/identity.js'; -import { stringify } from './stringify.js'; -import { lineComment, indentComment } from './stringifyComment.js'; - -function stringifyCollection(collection, ctx, options) { - const flow = ctx.inFlow ?? collection.flow; - const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection; - return stringify(collection, ctx, options); -} -function stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) { - const { indent, options: { commentString } } = ctx; - const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null }); - let chompKeep = false; // flag for the preceding node's status - const lines = []; - for (let i = 0; i < items.length; ++i) { - const item = items[i]; - let comment = null; - if (isNode(item)) { - if (!chompKeep && item.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, item.commentBefore, chompKeep); - if (item.comment) - comment = item.comment; - } - else if (isPair(item)) { - const ik = isNode(item.key) ? 
item.key : null; - if (ik) { - if (!chompKeep && ik.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, ik.commentBefore, chompKeep); - } - } - chompKeep = false; - let str = stringify(item, itemCtx, () => (comment = null), () => (chompKeep = true)); - if (comment) - str += lineComment(str, itemIndent, commentString(comment)); - if (chompKeep && comment) - chompKeep = false; - lines.push(blockItemPrefix + str); - } - let str; - if (lines.length === 0) { - str = flowChars.start + flowChars.end; - } - else { - str = lines[0]; - for (let i = 1; i < lines.length; ++i) { - const line = lines[i]; - str += line ? `\n${indent}${line}` : '\n'; - } - } - if (comment) { - str += '\n' + indentComment(commentString(comment), indent); - if (onComment) - onComment(); - } - else if (chompKeep && onChompKeep) - onChompKeep(); - return str; -} -function stringifyFlowCollection({ items }, ctx, { flowChars, itemIndent }) { - const { indent, indentStep, flowCollectionPadding: fcPadding, options: { commentString } } = ctx; - itemIndent += indentStep; - const itemCtx = Object.assign({}, ctx, { - indent: itemIndent, - inFlow: true, - type: null - }); - let reqNewline = false; - let linesAtValue = 0; - const lines = []; - for (let i = 0; i < items.length; ++i) { - const item = items[i]; - let comment = null; - if (isNode(item)) { - if (item.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, item.commentBefore, false); - if (item.comment) - comment = item.comment; - } - else if (isPair(item)) { - const ik = isNode(item.key) ? item.key : null; - if (ik) { - if (ik.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, ik.commentBefore, false); - if (ik.comment) - reqNewline = true; - } - const iv = isNode(item.value) ? item.value : null; - if (iv) { - if (iv.comment) - comment = iv.comment; - if (iv.commentBefore) - reqNewline = true; - } - else if (item.value == null && ik?.comment) { - comment = ik.comment; - } - } - if (comment) - reqNewline = true; - let str = stringify(item, itemCtx, () => (comment = null)); - if (i < items.length - 1) - str += ','; - if (comment) - str += lineComment(str, itemIndent, commentString(comment)); - if (!reqNewline && (lines.length > linesAtValue || str.includes('\n'))) - reqNewline = true; - lines.push(str); - linesAtValue = lines.length; - } - const { start, end } = flowChars; - if (lines.length === 0) { - return start + end; - } - else { - if (!reqNewline) { - const len = lines.reduce((sum, line) => sum + line.length + 2, 2); - reqNewline = ctx.options.lineWidth > 0 && len > ctx.options.lineWidth; - } - if (reqNewline) { - let str = start; - for (const line of lines) - str += line ? `\n${indentStep}${indent}${line}` : '\n'; - return `${str}\n${indent}${end}`; - } - else { - return `${start}${fcPadding}${lines.join(' ')}${fcPadding}${end}`; - } - } -} -function addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) { - if (comment && chompKeep) - comment = comment.replace(/^\n+/, ''); - if (comment) { - const ic = indentComment(commentString(comment), indent); - lines.push(ic.trimStart()); // Avoid double indent on first line - } -} - -export { stringifyCollection }; diff --git a/bin/node_modules/yaml/browser/dist/stringify/stringifyComment.js b/bin/node_modules/yaml/browser/dist/stringify/stringifyComment.js deleted file mode 100644 index f16fc91..0000000 --- a/bin/node_modules/yaml/browser/dist/stringify/stringifyComment.js +++ /dev/null @@ -1,20 +0,0 @@ -/** - * Stringifies a comment. 
- * - * Empty comment lines are left empty, - * lines consisting of a single space are replaced by `#`, - * and all other lines are prefixed with a `#`. - */ -const stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#'); -function indentComment(comment, indent) { - if (/^\n+$/.test(comment)) - return comment.substring(1); - return indent ? comment.replace(/^(?! *$)/gm, indent) : comment; -} -const lineComment = (str, indent, comment) => str.endsWith('\n') - ? indentComment(comment, indent) - : comment.includes('\n') - ? '\n' + indentComment(comment, indent) - : (str.endsWith(' ') ? '' : ' ') + comment; - -export { indentComment, lineComment, stringifyComment }; diff --git a/bin/node_modules/yaml/browser/dist/stringify/stringifyDocument.js b/bin/node_modules/yaml/browser/dist/stringify/stringifyDocument.js deleted file mode 100644 index 2a9defa..0000000 --- a/bin/node_modules/yaml/browser/dist/stringify/stringifyDocument.js +++ /dev/null @@ -1,85 +0,0 @@ -import { isNode } from '../nodes/identity.js'; -import { createStringifyContext, stringify } from './stringify.js'; -import { indentComment, lineComment } from './stringifyComment.js'; - -function stringifyDocument(doc, options) { - const lines = []; - let hasDirectives = options.directives === true; - if (options.directives !== false && doc.directives) { - const dir = doc.directives.toString(doc); - if (dir) { - lines.push(dir); - hasDirectives = true; - } - else if (doc.directives.docStart) - hasDirectives = true; - } - if (hasDirectives) - lines.push('---'); - const ctx = createStringifyContext(doc, options); - const { commentString } = ctx.options; - if (doc.commentBefore) { - if (lines.length !== 1) - lines.unshift(''); - const cs = commentString(doc.commentBefore); - lines.unshift(indentComment(cs, '')); - } - let chompKeep = false; - let contentComment = null; - if (doc.contents) { - if (isNode(doc.contents)) { - if (doc.contents.spaceBefore && hasDirectives) - lines.push(''); - if (doc.contents.commentBefore) { - const cs = commentString(doc.contents.commentBefore); - lines.push(indentComment(cs, '')); - } - // top-level block scalars need to be indented if followed by a comment - ctx.forceBlockIndent = !!doc.comment; - contentComment = doc.contents.comment; - } - const onChompKeep = contentComment ? undefined : () => (chompKeep = true); - let body = stringify(doc.contents, ctx, () => (contentComment = null), onChompKeep); - if (contentComment) - body += lineComment(body, '', commentString(contentComment)); - if ((body[0] === '|' || body[0] === '>') && - lines[lines.length - 1] === '---') { - // Top-level block scalars with a preceding doc marker ought to use the - // same line for their header. - lines[lines.length - 1] = `--- ${body}`; - } - else - lines.push(body); - } - else { - lines.push(stringify(doc.contents, ctx)); - } - if (doc.directives?.docEnd) { - if (doc.comment) { - const cs = commentString(doc.comment); - if (cs.includes('\n')) { - lines.push('...'); - lines.push(indentComment(cs, '')); - } - else { - lines.push(`... 
${cs}`); - } - } - else { - lines.push('...'); - } - } - else { - let dc = doc.comment; - if (dc && chompKeep) - dc = dc.replace(/^\n+/, ''); - if (dc) { - if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '') - lines.push(''); - lines.push(indentComment(commentString(dc), '')); - } - } - return lines.join('\n') + '\n'; -} - -export { stringifyDocument }; diff --git a/bin/node_modules/yaml/browser/dist/stringify/stringifyNumber.js b/bin/node_modules/yaml/browser/dist/stringify/stringifyNumber.js deleted file mode 100644 index 3fa35f9..0000000 --- a/bin/node_modules/yaml/browser/dist/stringify/stringifyNumber.js +++ /dev/null @@ -1,24 +0,0 @@ -function stringifyNumber({ format, minFractionDigits, tag, value }) { - if (typeof value === 'bigint') - return String(value); - const num = typeof value === 'number' ? value : Number(value); - if (!isFinite(num)) - return isNaN(num) ? '.nan' : num < 0 ? '-.inf' : '.inf'; - let n = JSON.stringify(value); - if (!format && - minFractionDigits && - (!tag || tag === 'tag:yaml.org,2002:float') && - /^\d/.test(n)) { - let i = n.indexOf('.'); - if (i < 0) { - i = n.length; - n += '.'; - } - let d = minFractionDigits - (n.length - i - 1); - while (d-- > 0) - n += '0'; - } - return n; -} - -export { stringifyNumber }; diff --git a/bin/node_modules/yaml/browser/dist/stringify/stringifyPair.js b/bin/node_modules/yaml/browser/dist/stringify/stringifyPair.js deleted file mode 100644 index 353124c..0000000 --- a/bin/node_modules/yaml/browser/dist/stringify/stringifyPair.js +++ /dev/null @@ -1,150 +0,0 @@ -import { isCollection, isNode, isScalar, isSeq } from '../nodes/identity.js'; -import { Scalar } from '../nodes/Scalar.js'; -import { stringify } from './stringify.js'; -import { lineComment, indentComment } from './stringifyComment.js'; - -function stringifyPair({ key, value }, ctx, onComment, onChompKeep) { - const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx; - let keyComment = (isNode(key) && key.comment) || null; - if (simpleKeys) { - if (keyComment) { - throw new Error('With simple keys, key nodes cannot have comments'); - } - if (isCollection(key) || (!isNode(key) && typeof key === 'object')) { - const msg = 'With simple keys, collection cannot be used as a key value'; - throw new Error(msg); - } - } - let explicitKey = !simpleKeys && - (!key || - (keyComment && value == null && !ctx.inFlow) || - isCollection(key) || - (isScalar(key) - ? key.type === Scalar.BLOCK_FOLDED || key.type === Scalar.BLOCK_LITERAL - : typeof key === 'object')); - ctx = Object.assign({}, ctx, { - allNullValues: false, - implicitKey: !explicitKey && (simpleKeys || !allNullValues), - indent: indent + indentStep - }); - let keyCommentDone = false; - let chompKeep = false; - let str = stringify(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true)); - if (!explicitKey && !ctx.inFlow && str.length > 1024) { - if (simpleKeys) - throw new Error('With simple keys, single line scalar must not span more than 1024 characters'); - explicitKey = true; - } - if (ctx.inFlow) { - if (allNullValues || value == null) { - if (keyCommentDone && onComment) - onComment(); - return str === '' ? '?' : explicitKey ? `? ${str}` : str; - } - } - else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) { - str = `? 
${str}`; - if (keyComment && !keyCommentDone) { - str += lineComment(str, ctx.indent, commentString(keyComment)); - } - else if (chompKeep && onChompKeep) - onChompKeep(); - return str; - } - if (keyCommentDone) - keyComment = null; - if (explicitKey) { - if (keyComment) - str += lineComment(str, ctx.indent, commentString(keyComment)); - str = `? ${str}\n${indent}:`; - } - else { - str = `${str}:`; - if (keyComment) - str += lineComment(str, ctx.indent, commentString(keyComment)); - } - let vsb, vcb, valueComment; - if (isNode(value)) { - vsb = !!value.spaceBefore; - vcb = value.commentBefore; - valueComment = value.comment; - } - else { - vsb = false; - vcb = null; - valueComment = null; - if (value && typeof value === 'object') - value = doc.createNode(value); - } - ctx.implicitKey = false; - if (!explicitKey && !keyComment && isScalar(value)) - ctx.indentAtStart = str.length + 1; - chompKeep = false; - if (!indentSeq && - indentStep.length >= 2 && - !ctx.inFlow && - !explicitKey && - isSeq(value) && - !value.flow && - !value.tag && - !value.anchor) { - // If indentSeq === false, consider '- ' as part of indentation where possible - ctx.indent = ctx.indent.substring(2); - } - let valueCommentDone = false; - const valueStr = stringify(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true)); - let ws = ' '; - if (keyComment || vsb || vcb) { - ws = vsb ? '\n' : ''; - if (vcb) { - const cs = commentString(vcb); - ws += `\n${indentComment(cs, ctx.indent)}`; - } - if (valueStr === '' && !ctx.inFlow) { - if (ws === '\n') - ws = '\n\n'; - } - else { - ws += `\n${ctx.indent}`; - } - } - else if (!explicitKey && isCollection(value)) { - const vs0 = valueStr[0]; - const nl0 = valueStr.indexOf('\n'); - const hasNewline = nl0 !== -1; - const flow = ctx.inFlow ?? value.flow ?? value.items.length === 0; - if (hasNewline || !flow) { - let hasPropsLine = false; - if (hasNewline && (vs0 === '&' || vs0 === '!')) { - let sp0 = valueStr.indexOf(' '); - if (vs0 === '&' && - sp0 !== -1 && - sp0 < nl0 && - valueStr[sp0 + 1] === '!') { - sp0 = valueStr.indexOf(' ', sp0 + 1); - } - if (sp0 === -1 || nl0 < sp0) - hasPropsLine = true; - } - if (!hasPropsLine) - ws = `\n${ctx.indent}`; - } - } - else if (valueStr === '' || valueStr[0] === '\n') { - ws = ''; - } - str += ws + valueStr; - if (ctx.inFlow) { - if (valueCommentDone && onComment) - onComment(); - } - else if (valueComment && !valueCommentDone) { - str += lineComment(str, ctx.indent, commentString(valueComment)); - } - else if (chompKeep && onChompKeep) { - onChompKeep(); - } - return str; -} - -export { stringifyPair }; diff --git a/bin/node_modules/yaml/browser/dist/stringify/stringifyString.js b/bin/node_modules/yaml/browser/dist/stringify/stringifyString.js deleted file mode 100644 index 2f1ceb6..0000000 --- a/bin/node_modules/yaml/browser/dist/stringify/stringifyString.js +++ /dev/null @@ -1,328 +0,0 @@ -import { Scalar } from '../nodes/Scalar.js'; -import { foldFlowLines, FOLD_QUOTED, FOLD_FLOW, FOLD_BLOCK } from './foldFlowLines.js'; - -const getFoldOptions = (ctx, isBlock) => ({ - indentAtStart: isBlock ? ctx.indent.length : ctx.indentAtStart, - lineWidth: ctx.options.lineWidth, - minContentWidth: ctx.options.minContentWidth -}); -// Also checks for lines starting with %, as parsing the output as YAML 1.1 will -// presume that's starting a new document. 
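The scalar stringifiers that follow decide between plain, single/double-quoted and block styles. As one concrete case of the escape handling below (a sketch, assuming the package's top-level stringify export), control characters force the double-quoted style and the writer prefers YAML's short escapes where they exist:

import { stringify } from 'yaml'

// U+0007 (BEL) cannot appear in a plain or single-quoted scalar, so the
// value is double-quoted and the short \a escape is used instead of \u0007.
console.log(stringify({ bell: 'ding\x07' }))   // bell: "ding\a"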
-const containsDocumentMarker = (str) => /^(%|---|\.\.\.)/m.test(str); -function lineLengthOverLimit(str, lineWidth, indentLength) { - if (!lineWidth || lineWidth < 0) - return false; - const limit = lineWidth - indentLength; - const strLen = str.length; - if (strLen <= limit) - return false; - for (let i = 0, start = 0; i < strLen; ++i) { - if (str[i] === '\n') { - if (i - start > limit) - return true; - start = i + 1; - if (strLen - start <= limit) - return false; - } - } - return true; -} -function doubleQuotedString(value, ctx) { - const json = JSON.stringify(value); - if (ctx.options.doubleQuotedAsJSON) - return json; - const { implicitKey } = ctx; - const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength; - const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : ''); - let str = ''; - let start = 0; - for (let i = 0, ch = json[i]; ch; ch = json[++i]) { - if (ch === ' ' && json[i + 1] === '\\' && json[i + 2] === 'n') { - // space before newline needs to be escaped to not be folded - str += json.slice(start, i) + '\\ '; - i += 1; - start = i; - ch = '\\'; - } - if (ch === '\\') - switch (json[i + 1]) { - case 'u': - { - str += json.slice(start, i); - const code = json.substr(i + 2, 4); - switch (code) { - case '0000': - str += '\\0'; - break; - case '0007': - str += '\\a'; - break; - case '000b': - str += '\\v'; - break; - case '001b': - str += '\\e'; - break; - case '0085': - str += '\\N'; - break; - case '00a0': - str += '\\_'; - break; - case '2028': - str += '\\L'; - break; - case '2029': - str += '\\P'; - break; - default: - if (code.substr(0, 2) === '00') - str += '\\x' + code.substr(2); - else - str += json.substr(i, 6); - } - i += 5; - start = i + 1; - } - break; - case 'n': - if (implicitKey || - json[i + 2] === '"' || - json.length < minMultiLineLength) { - i += 1; - } - else { - // folding will eat first newline - str += json.slice(start, i) + '\n\n'; - while (json[i + 2] === '\\' && - json[i + 3] === 'n' && - json[i + 4] !== '"') { - str += '\n'; - i += 2; - } - str += indent; - // space after newline needs to be escaped to not be folded - if (json[i + 2] === ' ') - str += '\\'; - i += 1; - start = i + 1; - } - break; - default: - i += 1; - } - } - str = start ? str + json.slice(start) : json; - return implicitKey - ? str - : foldFlowLines(str, indent, FOLD_QUOTED, getFoldOptions(ctx, false)); -} -function singleQuotedString(value, ctx) { - if (ctx.options.singleQuote === false || - (ctx.implicitKey && value.includes('\n')) || - /[ \t]\n|\n[ \t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline - ) - return doubleQuotedString(value, ctx); - const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : ''); - const res = "'" + value.replace(/'/g, "''").replace(/\n+/g, `$&\n${indent}`) + "'"; - return ctx.implicitKey - ? res - : foldFlowLines(res, indent, FOLD_FLOW, getFoldOptions(ctx, false)); -} -function quotedString(value, ctx) { - const { singleQuote } = ctx.options; - let qs; - if (singleQuote === false) - qs = doubleQuotedString; - else { - const hasDouble = value.includes('"'); - const hasSingle = value.includes("'"); - if (hasDouble && !hasSingle) - qs = singleQuotedString; - else if (hasSingle && !hasDouble) - qs = doubleQuotedString; - else - qs = singleQuote ? 
singleQuotedString : doubleQuotedString; - } - return qs(value, ctx); -} -// The negative lookbehind avoids a polynomial search, -// but isn't supported yet on Safari: https://caniuse.com/js-regexp-lookbehind -let blockEndNewlines; -try { - blockEndNewlines = new RegExp('(^|(?\n'; - // determine chomping from whitespace at value end - let chomp; - let endStart; - for (endStart = value.length; endStart > 0; --endStart) { - const ch = value[endStart - 1]; - if (ch !== '\n' && ch !== '\t' && ch !== ' ') - break; - } - let end = value.substring(endStart); - const endNlPos = end.indexOf('\n'); - if (endNlPos === -1) { - chomp = '-'; // strip - } - else if (value === end || endNlPos !== end.length - 1) { - chomp = '+'; // keep - if (onChompKeep) - onChompKeep(); - } - else { - chomp = ''; // clip - } - if (end) { - value = value.slice(0, -end.length); - if (end[end.length - 1] === '\n') - end = end.slice(0, -1); - end = end.replace(blockEndNewlines, `$&${indent}`); - } - // determine indent indicator from whitespace at value start - let startWithSpace = false; - let startEnd; - let startNlPos = -1; - for (startEnd = 0; startEnd < value.length; ++startEnd) { - const ch = value[startEnd]; - if (ch === ' ') - startWithSpace = true; - else if (ch === '\n') - startNlPos = startEnd; - else - break; - } - let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd); - if (start) { - value = value.substring(start.length); - start = start.replace(/\n+/g, `$&${indent}`); - } - const indentSize = indent ? '2' : '1'; // root is at -1 - let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp; - if (comment) { - header += ' ' + commentString(comment.replace(/ ?[\r\n]+/g, ' ')); - if (onComment) - onComment(); - } - if (literal) { - value = value.replace(/\n+/g, `$&${indent}`); - return `${header}\n${indent}${start}${value}${end}`; - } - value = value - .replace(/\n+/g, '\n$&') - .replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded - // ^ more-ind. ^ empty ^ capture next empty lines only at end of indent - .replace(/\n+/g, `$&${indent}`); - const body = foldFlowLines(`${start}${value}${end}`, indent, FOLD_BLOCK, getFoldOptions(ctx, true)); - return `${header}\n${indent}${body}`; -} -function plainString(item, ctx, onComment, onChompKeep) { - const { type, value } = item; - const { actualString, implicitKey, indent, indentStep, inFlow } = ctx; - if ((implicitKey && value.includes('\n')) || - (inFlow && /[[\]{},]/.test(value))) { - return quotedString(value, ctx); - } - if (!value || - /^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) { - // not allowed: - // - empty string, '-' or '?' - // - start with an indicator character (except [?:-]) or /[?-] / - // - '\n ', ': ' or ' \n' anywhere - // - '#' not preceded by a non-space char - // - end with ' ' or ':' - return implicitKey || inFlow || !value.includes('\n') - ? 
quotedString(value, ctx) - : blockString(item, ctx, onComment, onChompKeep); - } - if (!implicitKey && - !inFlow && - type !== Scalar.PLAIN && - value.includes('\n')) { - // Where allowed & type not set explicitly, prefer block style for multiline strings - return blockString(item, ctx, onComment, onChompKeep); - } - if (containsDocumentMarker(value)) { - if (indent === '') { - ctx.forceBlockIndent = true; - return blockString(item, ctx, onComment, onChompKeep); - } - else if (implicitKey && indent === indentStep) { - return quotedString(value, ctx); - } - } - const str = value.replace(/\n+/g, `$&\n${indent}`); - // Verify that output will be parsed as a string, as e.g. plain numbers and - // booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'), - // and others in v1.1. - if (actualString) { - const test = (tag) => tag.default && tag.tag !== 'tag:yaml.org,2002:str' && tag.test?.test(str); - const { compat, tags } = ctx.doc.schema; - if (tags.some(test) || compat?.some(test)) - return quotedString(value, ctx); - } - return implicitKey - ? str - : foldFlowLines(str, indent, FOLD_FLOW, getFoldOptions(ctx, false)); -} -function stringifyString(item, ctx, onComment, onChompKeep) { - const { implicitKey, inFlow } = ctx; - const ss = typeof item.value === 'string' - ? item - : Object.assign({}, item, { value: String(item.value) }); - let { type } = item; - if (type !== Scalar.QUOTE_DOUBLE) { - // force double quotes on control characters & unpaired surrogates - if (/[\x00-\x08\x0b-\x1f\x7f-\x9f\u{D800}-\u{DFFF}]/u.test(ss.value)) - type = Scalar.QUOTE_DOUBLE; - } - const _stringify = (_type) => { - switch (_type) { - case Scalar.BLOCK_FOLDED: - case Scalar.BLOCK_LITERAL: - return implicitKey || inFlow - ? quotedString(ss.value, ctx) // blocks are not valid inside flow containers - : blockString(ss, ctx, onComment, onChompKeep); - case Scalar.QUOTE_DOUBLE: - return doubleQuotedString(ss.value, ctx); - case Scalar.QUOTE_SINGLE: - return singleQuotedString(ss.value, ctx); - case Scalar.PLAIN: - return plainString(ss, ctx, onComment, onChompKeep); - default: - return null; - } - }; - let res = _stringify(type); - if (res === null) { - const { defaultKeyType, defaultStringType } = ctx.options; - const t = (implicitKey && defaultKeyType) || defaultStringType; - res = _stringify(t); - if (res === null) - throw new Error(`Unsupported default string type ${t}`); - } - return res; -} - -export { stringifyString }; diff --git a/bin/node_modules/yaml/browser/dist/util.js b/bin/node_modules/yaml/browser/dist/util.js deleted file mode 100644 index ec59413..0000000 --- a/bin/node_modules/yaml/browser/dist/util.js +++ /dev/null @@ -1,11 +0,0 @@ -export { createNode } from './doc/createNode.js'; -export { debug, warn } from './log.js'; -export { createPair } from './nodes/Pair.js'; -export { findPair } from './nodes/YAMLMap.js'; -export { toJS } from './nodes/toJS.js'; -export { map as mapTag } from './schema/common/map.js'; -export { seq as seqTag } from './schema/common/seq.js'; -export { string as stringTag } from './schema/common/string.js'; -export { foldFlowLines } from './stringify/foldFlowLines.js'; -export { stringifyNumber } from './stringify/stringifyNumber.js'; -export { stringifyString } from './stringify/stringifyString.js'; diff --git a/bin/node_modules/yaml/browser/dist/visit.js b/bin/node_modules/yaml/browser/dist/visit.js deleted file mode 100644 index b5eef41..0000000 --- a/bin/node_modules/yaml/browser/dist/visit.js +++ /dev/null @@ -1,233 +0,0 @@ -import { isDocument, 
isNode, isPair, isCollection, isMap, isSeq, isScalar, isAlias } from './nodes/identity.js'; - -const BREAK = Symbol('break visit'); -const SKIP = Symbol('skip children'); -const REMOVE = Symbol('remove node'); -/** - * Apply a visitor to an AST node or document. - * - * Walks through the tree (depth-first) starting from `node`, calling a - * `visitor` function with three arguments: - * - `key`: For sequence values and map `Pair`, the node's index in the - * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. - * `null` for the root node. - * - `node`: The current node. - * - `path`: The ancestry of the current node. - * - * The return value of the visitor may be used to control the traversal: - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this node, continue with next - * sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current node, then continue with the next one - * - `Node`: Replace the current node, then continue by visiting it - * - `number`: While iterating the items of a sequence or map, set the index - * of the next step. This is useful especially if the index of the current - * node has changed. - * - * If `visitor` is a single function, it will be called with all values - * encountered in the tree, including e.g. `null` values. Alternatively, - * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, - * `Alias` and `Scalar` node. To define the same visitor function for more than - * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) - * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most - * specific defined one will be used for each node. - */ -function visit(node, visitor) { - const visitor_ = initVisitor(visitor); - if (isDocument(node)) { - const cd = visit_(null, node.contents, visitor_, Object.freeze([node])); - if (cd === REMOVE) - node.contents = null; - } - else - visit_(null, node, visitor_, Object.freeze([])); -} -// Without the `as symbol` casts, TS declares these in the `visit` -// namespace using `var`, but then complains about that because -// `unique symbol` must be `const`. -/** Terminate visit traversal completely */ -visit.BREAK = BREAK; -/** Do not visit the children of the current node */ -visit.SKIP = SKIP; -/** Remove the current node */ -visit.REMOVE = REMOVE; -function visit_(key, node, visitor, path) { - const ctrl = callVisitor(key, node, visitor, path); - if (isNode(ctrl) || isPair(ctrl)) { - replaceNode(key, path, ctrl); - return visit_(key, ctrl, visitor, path); - } - if (typeof ctrl !== 'symbol') { - if (isCollection(node)) { - path = Object.freeze(path.concat(node)); - for (let i = 0; i < node.items.length; ++i) { - const ci = visit_(i, node.items[i], visitor, path); - if (typeof ci === 'number') - i = ci - 1; - else if (ci === BREAK) - return BREAK; - else if (ci === REMOVE) { - node.items.splice(i, 1); - i -= 1; - } - } - } - else if (isPair(node)) { - path = Object.freeze(path.concat(node)); - const ck = visit_('key', node.key, visitor, path); - if (ck === BREAK) - return BREAK; - else if (ck === REMOVE) - node.key = null; - const cv = visit_('value', node.value, visitor, path); - if (cv === BREAK) - return BREAK; - else if (cv === REMOVE) - node.value = null; - } - } - return ctrl; -} -/** - * Apply an async visitor to an AST node or document. 
- * - * Walks through the tree (depth-first) starting from `node`, calling a - * `visitor` function with three arguments: - * - `key`: For sequence values and map `Pair`, the node's index in the - * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. - * `null` for the root node. - * - `node`: The current node. - * - `path`: The ancestry of the current node. - * - * The return value of the visitor may be used to control the traversal: - * - `Promise`: Must resolve to one of the following values - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this node, continue with next - * sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current node, then continue with the next one - * - `Node`: Replace the current node, then continue by visiting it - * - `number`: While iterating the items of a sequence or map, set the index - * of the next step. This is useful especially if the index of the current - * node has changed. - * - * If `visitor` is a single function, it will be called with all values - * encountered in the tree, including e.g. `null` values. Alternatively, - * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, - * `Alias` and `Scalar` node. To define the same visitor function for more than - * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) - * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most - * specific defined one will be used for each node. - */ -async function visitAsync(node, visitor) { - const visitor_ = initVisitor(visitor); - if (isDocument(node)) { - const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node])); - if (cd === REMOVE) - node.contents = null; - } - else - await visitAsync_(null, node, visitor_, Object.freeze([])); -} -// Without the `as symbol` casts, TS declares these in the `visit` -// namespace using `var`, but then complains about that because -// `unique symbol` must be `const`. 
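visit.js, removed here, is the tree visitor documented in the comment above. A minimal usage sketch, assuming the package's usual top-level exports (parseDocument, visit):

import { parseDocument, visit } from 'yaml'

const doc = parseDocument('a: 1\nb: [2, 3]\n')

// Visit every Scalar node; the return value (visit.SKIP, visit.BREAK,
// visit.REMOVE, a replacement node, or an index) would control traversal.
visit(doc, {
  Scalar(key, node) {
    if (typeof node.value === 'number') node.value *= 10
  }
})

console.log(String(doc))   // a: 10
                           // b: [ 20, 30 ]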
-/** Terminate visit traversal completely */ -visitAsync.BREAK = BREAK; -/** Do not visit the children of the current node */ -visitAsync.SKIP = SKIP; -/** Remove the current node */ -visitAsync.REMOVE = REMOVE; -async function visitAsync_(key, node, visitor, path) { - const ctrl = await callVisitor(key, node, visitor, path); - if (isNode(ctrl) || isPair(ctrl)) { - replaceNode(key, path, ctrl); - return visitAsync_(key, ctrl, visitor, path); - } - if (typeof ctrl !== 'symbol') { - if (isCollection(node)) { - path = Object.freeze(path.concat(node)); - for (let i = 0; i < node.items.length; ++i) { - const ci = await visitAsync_(i, node.items[i], visitor, path); - if (typeof ci === 'number') - i = ci - 1; - else if (ci === BREAK) - return BREAK; - else if (ci === REMOVE) { - node.items.splice(i, 1); - i -= 1; - } - } - } - else if (isPair(node)) { - path = Object.freeze(path.concat(node)); - const ck = await visitAsync_('key', node.key, visitor, path); - if (ck === BREAK) - return BREAK; - else if (ck === REMOVE) - node.key = null; - const cv = await visitAsync_('value', node.value, visitor, path); - if (cv === BREAK) - return BREAK; - else if (cv === REMOVE) - node.value = null; - } - } - return ctrl; -} -function initVisitor(visitor) { - if (typeof visitor === 'object' && - (visitor.Collection || visitor.Node || visitor.Value)) { - return Object.assign({ - Alias: visitor.Node, - Map: visitor.Node, - Scalar: visitor.Node, - Seq: visitor.Node - }, visitor.Value && { - Map: visitor.Value, - Scalar: visitor.Value, - Seq: visitor.Value - }, visitor.Collection && { - Map: visitor.Collection, - Seq: visitor.Collection - }, visitor); - } - return visitor; -} -function callVisitor(key, node, visitor, path) { - if (typeof visitor === 'function') - return visitor(key, node, path); - if (isMap(node)) - return visitor.Map?.(key, node, path); - if (isSeq(node)) - return visitor.Seq?.(key, node, path); - if (isPair(node)) - return visitor.Pair?.(key, node, path); - if (isScalar(node)) - return visitor.Scalar?.(key, node, path); - if (isAlias(node)) - return visitor.Alias?.(key, node, path); - return undefined; -} -function replaceNode(key, path, node) { - const parent = path[path.length - 1]; - if (isCollection(parent)) { - parent.items[key] = node; - } - else if (isPair(parent)) { - if (key === 'key') - parent.key = node; - else - parent.value = node; - } - else if (isDocument(parent)) { - parent.contents = node; - } - else { - const pt = isAlias(parent) ? 
'alias' : 'scalar'; - throw new Error(`Cannot replace node with ${pt} parent`); - } -} - -export { visit, visitAsync }; diff --git a/bin/node_modules/yaml/browser/index.js b/bin/node_modules/yaml/browser/index.js deleted file mode 100644 index 5f73271..0000000 --- a/bin/node_modules/yaml/browser/index.js +++ /dev/null @@ -1,5 +0,0 @@ -// `export * as default from ...` fails on Webpack v4 -// https://github.com/eemeli/yaml/issues/228 -import * as YAML from './dist/index.js' -export default YAML -export * from './dist/index.js' diff --git a/bin/node_modules/yaml/browser/package.json b/bin/node_modules/yaml/browser/package.json deleted file mode 100644 index 3dbc1ca..0000000 --- a/bin/node_modules/yaml/browser/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "module" -} diff --git a/bin/node_modules/yaml/dist/cli.d.ts b/bin/node_modules/yaml/dist/cli.d.ts deleted file mode 100644 index d10442b..0000000 --- a/bin/node_modules/yaml/dist/cli.d.ts +++ /dev/null @@ -1,9 +0,0 @@ -/// -export declare const help = "yaml: A command-line YAML processor and inspector\n\nReads stdin and writes output to stdout and errors & warnings to stderr.\n\nUsage:\n yaml Process a YAML stream, outputting it as YAML\n yaml cst Parse the CST of a YAML stream\n yaml lex Parse the lexical tokens of a YAML stream\n yaml valid Validate a YAML stream, returning 0 on success\n\nOptions:\n --help, -h Show this message.\n --json, -j Output JSON.\n\nAdditional options for bare \"yaml\" command:\n --doc, -d Output pretty-printed JS Document objects.\n --single, -1 Require the input to consist of a single YAML document.\n --strict, -s Stop on errors.\n --visit, -v Apply a visitor to each document (requires a path to import)\n --yaml 1.1 Set the YAML version. (default: 1.2)"; -export declare class UserError extends Error { - static ARGS: number; - static SINGLE: number; - code: number; - constructor(code: number, message: string); -} -export declare function cli(stdin: NodeJS.ReadableStream, done: (error?: Error) => void, argv?: string[]): Promise; diff --git a/bin/node_modules/yaml/dist/cli.mjs b/bin/node_modules/yaml/dist/cli.mjs deleted file mode 100644 index e74c8f8..0000000 --- a/bin/node_modules/yaml/dist/cli.mjs +++ /dev/null @@ -1,195 +0,0 @@ -import { resolve } from 'node:path'; -import { parseArgs } from 'node:util'; -import { prettyToken } from './parse/cst.js'; -import { Lexer } from './parse/lexer.js'; -import { Parser } from './parse/parser.js'; -import { Composer } from './compose/composer.js'; -import { LineCounter } from './parse/line-counter.js'; -import { prettifyError } from './errors.js'; -import { visit } from './visit.js'; - -const help = `\ -yaml: A command-line YAML processor and inspector - -Reads stdin and writes output to stdout and errors & warnings to stderr. - -Usage: - yaml Process a YAML stream, outputting it as YAML - yaml cst Parse the CST of a YAML stream - yaml lex Parse the lexical tokens of a YAML stream - yaml valid Validate a YAML stream, returning 0 on success - -Options: - --help, -h Show this message. - --json, -j Output JSON. - -Additional options for bare "yaml" command: - --doc, -d Output pretty-printed JS Document objects. - --single, -1 Require the input to consist of a single YAML document. - --strict, -s Stop on errors. - --visit, -v Apply a visitor to each document (requires a path to import) - --yaml 1.1 Set the YAML version. 
(default: 1.2)`; -class UserError extends Error { - constructor(code, message) { - super(`Error: ${message}`); - this.code = code; - } -} -UserError.ARGS = 2; -UserError.SINGLE = 3; -async function cli(stdin, done, argv) { - let args; - try { - args = parseArgs({ - args: argv, - allowPositionals: true, - options: { - doc: { type: 'boolean', short: 'd' }, - help: { type: 'boolean', short: 'h' }, - json: { type: 'boolean', short: 'j' }, - single: { type: 'boolean', short: '1' }, - strict: { type: 'boolean', short: 's' }, - visit: { type: 'string', short: 'v' }, - yaml: { type: 'string', default: '1.2' } - } - }); - } - catch (error) { - return done(new UserError(UserError.ARGS, error.message)); - } - const { positionals: [mode], values: opt } = args; - stdin.setEncoding('utf-8'); - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - switch (opt.help || mode) { - /* istanbul ignore next */ - case true: // --help - console.log(help); - break; - case 'lex': { - const lexer = new Lexer(); - const data = []; - const add = (tok) => { - if (opt.json) - data.push(tok); - else - console.log(prettyToken(tok)); - }; - stdin.on('data', (chunk) => { - for (const tok of lexer.lex(chunk, true)) - add(tok); - }); - stdin.on('end', () => { - for (const tok of lexer.lex('', false)) - add(tok); - if (opt.json) - console.log(JSON.stringify(data)); - done(); - }); - break; - } - case 'cst': { - const parser = new Parser(); - const data = []; - const add = (tok) => { - if (opt.json) - data.push(tok); - else - console.dir(tok, { depth: null }); - }; - stdin.on('data', (chunk) => { - for (const tok of parser.parse(chunk, true)) - add(tok); - }); - stdin.on('end', () => { - for (const tok of parser.parse('', false)) - add(tok); - if (opt.json) - console.log(JSON.stringify(data)); - done(); - }); - break; - } - case undefined: - case 'valid': { - const lineCounter = new LineCounter(); - const parser = new Parser(lineCounter.addNewLine); - // @ts-expect-error Version is validated at runtime - const composer = new Composer({ version: opt.yaml }); - const visitor = opt.visit - ? (await import(resolve(opt.visit))).default - : null; - let source = ''; - let hasDoc = false; - let reqDocEnd = false; - const data = []; - const add = (doc) => { - if (hasDoc && opt.single) { - return done(new UserError(UserError.SINGLE, 'Input stream contains multiple documents')); - } - for (const error of doc.errors) { - prettifyError(source, lineCounter)(error); - if (opt.strict || mode === 'valid') - return done(error); - console.error(error); - } - for (const warning of doc.warnings) { - prettifyError(source, lineCounter)(warning); - console.error(warning); - } - if (visitor) - visit(doc, visitor); - if (mode === 'valid') - doc.toJS(); - else if (opt.json) - data.push(doc); - else if (opt.doc) { - Object.defineProperties(doc, { - options: { enumerable: false }, - schema: { enumerable: false } - }); - console.dir(doc, { depth: null }); - } - else { - if (reqDocEnd) - console.log('...'); - try { - const str = String(doc); - console.log(str.endsWith('\n') ? 
str.slice(0, -1) : str); - } - catch (error) { - done(error); - } - } - hasDoc = true; - reqDocEnd = !doc.directives?.docEnd; - }; - stdin.on('data', (chunk) => { - source += chunk; - for (const tok of parser.parse(chunk, true)) { - for (const doc of composer.next(tok)) - add(doc); - } - }); - stdin.on('end', () => { - for (const tok of parser.parse('', false)) { - for (const doc of composer.next(tok)) - add(doc); - } - for (const doc of composer.end(false)) - add(doc); - if (opt.single && !hasDoc) { - return done(new UserError(UserError.SINGLE, 'Input stream contained no documents')); - } - if (mode !== 'valid' && opt.json) { - console.log(JSON.stringify(opt.single ? data[0] : data)); - } - done(); - }); - break; - } - default: - done(new UserError(UserError.ARGS, `Unknown command: ${JSON.stringify(mode)}`)); - } -} - -export { UserError, cli, help }; diff --git a/bin/node_modules/yaml/dist/compose/compose-collection.d.ts b/bin/node_modules/yaml/dist/compose/compose-collection.d.ts deleted file mode 100644 index b310e2d..0000000 --- a/bin/node_modules/yaml/dist/compose/compose-collection.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -import type { ParsedNode } from '../nodes/Node.js'; -import type { BlockMap, BlockSequence, FlowCollection, SourceToken } from '../parse/cst.js'; -import type { ComposeContext, ComposeNode } from './compose-node.js'; -import type { ComposeErrorHandler } from './composer.js'; -export declare function composeCollection(CN: ComposeNode, ctx: ComposeContext, token: BlockMap | BlockSequence | FlowCollection, tagToken: SourceToken | null, onError: ComposeErrorHandler): ParsedNode; diff --git a/bin/node_modules/yaml/dist/compose/compose-collection.js b/bin/node_modules/yaml/dist/compose/compose-collection.js deleted file mode 100644 index e4b1971..0000000 --- a/bin/node_modules/yaml/dist/compose/compose-collection.js +++ /dev/null @@ -1,78 +0,0 @@ -'use strict'; - -var identity = require('../nodes/identity.js'); -var Scalar = require('../nodes/Scalar.js'); -var YAMLMap = require('../nodes/YAMLMap.js'); -var YAMLSeq = require('../nodes/YAMLSeq.js'); -var resolveBlockMap = require('./resolve-block-map.js'); -var resolveBlockSeq = require('./resolve-block-seq.js'); -var resolveFlowCollection = require('./resolve-flow-collection.js'); - -function resolveCollection(CN, ctx, token, onError, tagName, tag) { - const coll = token.type === 'block-map' - ? resolveBlockMap.resolveBlockMap(CN, ctx, token, onError, tag) - : token.type === 'block-seq' - ? resolveBlockSeq.resolveBlockSeq(CN, ctx, token, onError, tag) - : resolveFlowCollection.resolveFlowCollection(CN, ctx, token, onError, tag); - const Coll = coll.constructor; - // If we got a tagName matching the class, or the tag name is '!', - // then use the tagName from the node class used to create it. - if (tagName === '!' || tagName === Coll.tagName) { - coll.tag = Coll.tagName; - return coll; - } - if (tagName) - coll.tag = tagName; - return coll; -} -function composeCollection(CN, ctx, token, tagToken, onError) { - const tagName = !tagToken - ? null - : ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg)); - const expType = token.type === 'block-map' - ? 'map' - : token.type === 'block-seq' - ? 'seq' - : token.start.source === '{' - ? 'map' - : 'seq'; - // shortcut: check if it's a generic YAMLMap or YAMLSeq - // before jumping into the custom tag logic. - if (!tagToken || - !tagName || - tagName === '!' 
|| - (tagName === YAMLMap.YAMLMap.tagName && expType === 'map') || - (tagName === YAMLSeq.YAMLSeq.tagName && expType === 'seq') || - !expType) { - return resolveCollection(CN, ctx, token, onError, tagName); - } - let tag = ctx.schema.tags.find(t => t.tag === tagName && t.collection === expType); - if (!tag) { - const kt = ctx.schema.knownTags[tagName]; - if (kt && kt.collection === expType) { - ctx.schema.tags.push(Object.assign({}, kt, { default: false })); - tag = kt; - } - else { - if (kt?.collection) { - onError(tagToken, 'BAD_COLLECTION_TYPE', `${kt.tag} used for ${expType} collection, but expects ${kt.collection}`, true); - } - else { - onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true); - } - return resolveCollection(CN, ctx, token, onError, tagName); - } - } - const coll = resolveCollection(CN, ctx, token, onError, tagName, tag); - const res = tag.resolve?.(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options) ?? coll; - const node = identity.isNode(res) - ? res - : new Scalar.Scalar(res); - node.range = coll.range; - node.tag = tagName; - if (tag?.format) - node.format = tag.format; - return node; -} - -exports.composeCollection = composeCollection; diff --git a/bin/node_modules/yaml/dist/compose/compose-doc.d.ts b/bin/node_modules/yaml/dist/compose/compose-doc.d.ts deleted file mode 100644 index 05816a7..0000000 --- a/bin/node_modules/yaml/dist/compose/compose-doc.d.ts +++ /dev/null @@ -1,7 +0,0 @@ -import type { Directives } from '../doc/directives.js'; -import { Document } from '../doc/Document.js'; -import type { ParsedNode } from '../nodes/Node.js'; -import type { DocumentOptions, ParseOptions, SchemaOptions } from '../options.js'; -import type * as CST from '../parse/cst.js'; -import type { ComposeErrorHandler } from './composer.js'; -export declare function composeDoc(options: ParseOptions & DocumentOptions & SchemaOptions, directives: Directives, { offset, start, value, end }: CST.Document, onError: ComposeErrorHandler): Document.Parsed; diff --git a/bin/node_modules/yaml/dist/compose/compose-doc.js b/bin/node_modules/yaml/dist/compose/compose-doc.js deleted file mode 100644 index d19c227..0000000 --- a/bin/node_modules/yaml/dist/compose/compose-doc.js +++ /dev/null @@ -1,43 +0,0 @@ -'use strict'; - -var Document = require('../doc/Document.js'); -var composeNode = require('./compose-node.js'); -var resolveEnd = require('./resolve-end.js'); -var resolveProps = require('./resolve-props.js'); - -function composeDoc(options, directives, { offset, start, value, end }, onError) { - const opts = Object.assign({ _directives: directives }, options); - const doc = new Document.Document(undefined, opts); - const ctx = { - atRoot: true, - directives: doc.directives, - options: doc.options, - schema: doc.schema - }; - const props = resolveProps.resolveProps(start, { - indicator: 'doc-start', - next: value ?? end?.[0], - offset, - onError, - startOnNewline: true - }); - if (props.found) { - doc.directives.docStart = true; - if (value && - (value.type === 'block-map' || value.type === 'block-seq') && - !props.hasNewline) - onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker'); - } - // @ts-expect-error If Contents is set, let's trust the user - doc.contents = value - ? 
composeNode.composeNode(ctx, value, props, onError) - : composeNode.composeEmptyNode(ctx, props.end, start, null, props, onError); - const contentEnd = doc.contents.range[2]; - const re = resolveEnd.resolveEnd(end, contentEnd, false, onError); - if (re.comment) - doc.comment = re.comment; - doc.range = [offset, contentEnd, re.offset]; - return doc; -} - -exports.composeDoc = composeDoc; diff --git a/bin/node_modules/yaml/dist/compose/compose-node.d.ts b/bin/node_modules/yaml/dist/compose/compose-node.d.ts deleted file mode 100644 index 8d3e467..0000000 --- a/bin/node_modules/yaml/dist/compose/compose-node.d.ts +++ /dev/null @@ -1,27 +0,0 @@ -import type { Directives } from '../doc/directives.js'; -import type { ParsedNode } from '../nodes/Node.js'; -import type { ParseOptions } from '../options.js'; -import type { SourceToken, Token } from '../parse/cst.js'; -import type { Schema } from '../schema/Schema.js'; -import type { ComposeErrorHandler } from './composer.js'; -export interface ComposeContext { - atRoot: boolean; - directives: Directives; - options: Readonly>>; - schema: Readonly; -} -interface Props { - spaceBefore: boolean; - comment: string; - anchor: SourceToken | null; - tag: SourceToken | null; - end: number; -} -declare const CN: { - composeNode: typeof composeNode; - composeEmptyNode: typeof composeEmptyNode; -}; -export type ComposeNode = typeof CN; -export declare function composeNode(ctx: ComposeContext, token: Token, props: Props, onError: ComposeErrorHandler): ParsedNode; -export declare function composeEmptyNode(ctx: ComposeContext, offset: number, before: Token[] | undefined, pos: number | null, { spaceBefore, comment, anchor, tag, end }: Props, onError: ComposeErrorHandler): import("../index.js").Scalar.Parsed; -export {}; diff --git a/bin/node_modules/yaml/dist/compose/compose-node.js b/bin/node_modules/yaml/dist/compose/compose-node.js deleted file mode 100644 index 7eb1daf..0000000 --- a/bin/node_modules/yaml/dist/compose/compose-node.js +++ /dev/null @@ -1,95 +0,0 @@ -'use strict'; - -var Alias = require('../nodes/Alias.js'); -var composeCollection = require('./compose-collection.js'); -var composeScalar = require('./compose-scalar.js'); -var resolveEnd = require('./resolve-end.js'); -var utilEmptyScalarPosition = require('./util-empty-scalar-position.js'); - -const CN = { composeNode, composeEmptyNode }; -function composeNode(ctx, token, props, onError) { - const { spaceBefore, comment, anchor, tag } = props; - let node; - let isSrcToken = true; - switch (token.type) { - case 'alias': - node = composeAlias(ctx, token, onError); - if (anchor || tag) - onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties'); - break; - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - case 'block-scalar': - node = composeScalar.composeScalar(ctx, token, tag, onError); - if (anchor) - node.anchor = anchor.source.substring(1); - break; - case 'block-map': - case 'block-seq': - case 'flow-collection': - node = composeCollection.composeCollection(CN, ctx, token, tag, onError); - if (anchor) - node.anchor = anchor.source.substring(1); - break; - default: { - const message = token.type === 'error' - ? 
token.message - : `Unsupported token (type: ${token.type})`; - onError(token, 'UNEXPECTED_TOKEN', message); - node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError); - isSrcToken = false; - } - } - if (anchor && node.anchor === '') - onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string'); - if (spaceBefore) - node.spaceBefore = true; - if (comment) { - if (token.type === 'scalar' && token.source === '') - node.comment = comment; - else - node.commentBefore = comment; - } - // @ts-expect-error Type checking misses meaning of isSrcToken - if (ctx.options.keepSourceTokens && isSrcToken) - node.srcToken = token; - return node; -} -function composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag, end }, onError) { - const token = { - type: 'scalar', - offset: utilEmptyScalarPosition.emptyScalarPosition(offset, before, pos), - indent: -1, - source: '' - }; - const node = composeScalar.composeScalar(ctx, token, tag, onError); - if (anchor) { - node.anchor = anchor.source.substring(1); - if (node.anchor === '') - onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string'); - } - if (spaceBefore) - node.spaceBefore = true; - if (comment) { - node.comment = comment; - node.range[2] = end; - } - return node; -} -function composeAlias({ options }, { offset, source, end }, onError) { - const alias = new Alias.Alias(source.substring(1)); - if (alias.source === '') - onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string'); - if (alias.source.endsWith(':')) - onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true); - const valueEnd = offset + source.length; - const re = resolveEnd.resolveEnd(end, valueEnd, options.strict, onError); - alias.range = [offset, valueEnd, re.offset]; - if (re.comment) - alias.comment = re.comment; - return alias; -} - -exports.composeEmptyNode = composeEmptyNode; -exports.composeNode = composeNode; diff --git a/bin/node_modules/yaml/dist/compose/compose-scalar.d.ts b/bin/node_modules/yaml/dist/compose/compose-scalar.d.ts deleted file mode 100644 index d5d0f79..0000000 --- a/bin/node_modules/yaml/dist/compose/compose-scalar.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -import { Scalar } from '../nodes/Scalar.js'; -import type { BlockScalar, FlowScalar, SourceToken } from '../parse/cst.js'; -import type { ComposeContext } from './compose-node.js'; -import type { ComposeErrorHandler } from './composer.js'; -export declare function composeScalar(ctx: ComposeContext, token: FlowScalar | BlockScalar, tagToken: SourceToken | null, onError: ComposeErrorHandler): Scalar.Parsed; diff --git a/bin/node_modules/yaml/dist/compose/compose-scalar.js b/bin/node_modules/yaml/dist/compose/compose-scalar.js deleted file mode 100644 index ace8964..0000000 --- a/bin/node_modules/yaml/dist/compose/compose-scalar.js +++ /dev/null @@ -1,82 +0,0 @@ -'use strict'; - -var identity = require('../nodes/identity.js'); -var Scalar = require('../nodes/Scalar.js'); -var resolveBlockScalar = require('./resolve-block-scalar.js'); -var resolveFlowScalar = require('./resolve-flow-scalar.js'); - -function composeScalar(ctx, token, tagToken, onError) { - const { value, type, comment, range } = token.type === 'block-scalar' - ? resolveBlockScalar.resolveBlockScalar(token, ctx.options.strict, onError) - : resolveFlowScalar.resolveFlowScalar(token, ctx.options.strict, onError); - const tagName = tagToken - ? 
ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg)) - : null; - const tag = tagToken && tagName - ? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError) - : token.type === 'scalar' - ? findScalarTagByTest(ctx, value, token, onError) - : ctx.schema[identity.SCALAR]; - let scalar; - try { - const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options); - scalar = identity.isScalar(res) ? res : new Scalar.Scalar(res); - } - catch (error) { - const msg = error instanceof Error ? error.message : String(error); - onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg); - scalar = new Scalar.Scalar(value); - } - scalar.range = range; - scalar.source = value; - if (type) - scalar.type = type; - if (tagName) - scalar.tag = tagName; - if (tag.format) - scalar.format = tag.format; - if (comment) - scalar.comment = comment; - return scalar; -} -function findScalarTagByName(schema, value, tagName, tagToken, onError) { - if (tagName === '!') - return schema[identity.SCALAR]; // non-specific tag - const matchWithTest = []; - for (const tag of schema.tags) { - if (!tag.collection && tag.tag === tagName) { - if (tag.default && tag.test) - matchWithTest.push(tag); - else - return tag; - } - } - for (const tag of matchWithTest) - if (tag.test?.test(value)) - return tag; - const kt = schema.knownTags[tagName]; - if (kt && !kt.collection) { - // Ensure that the known tag is available for stringifying, - // but does not get used by default. - schema.tags.push(Object.assign({}, kt, { default: false, test: undefined })); - return kt; - } - onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str'); - return schema[identity.SCALAR]; -} -function findScalarTagByTest({ directives, schema }, value, token, onError) { - const tag = schema.tags.find(tag => tag.default && tag.test?.test(value)) || schema[identity.SCALAR]; - if (schema.compat) { - const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ?? - schema[identity.SCALAR]; - if (tag.tag !== compat.tag) { - const ts = directives.tagString(tag.tag); - const cs = directives.tagString(compat.tag); - const msg = `Value may be parsed as either ${ts} or ${cs}`; - onError(token, 'TAG_RESOLVE_FAILED', msg, true); - } - } - return tag; -} - -exports.composeScalar = composeScalar; diff --git a/bin/node_modules/yaml/dist/compose/composer.d.ts b/bin/node_modules/yaml/dist/compose/composer.d.ts deleted file mode 100644 index cd3d323..0000000 --- a/bin/node_modules/yaml/dist/compose/composer.d.ts +++ /dev/null @@ -1,62 +0,0 @@ -import { Directives } from '../doc/directives.js'; -import { Document } from '../doc/Document.js'; -import { ErrorCode, YAMLParseError, YAMLWarning } from '../errors.js'; -import type { ParsedNode, Range } from '../nodes/Node.js'; -import type { DocumentOptions, ParseOptions, SchemaOptions } from '../options.js'; -import type { Token } from '../parse/cst.js'; -type ErrorSource = number | [number, number] | Range | { - offset: number; - source?: string; -}; -export type ComposeErrorHandler = (source: ErrorSource, code: ErrorCode, message: string, warning?: boolean) => void; -/** - * Compose a stream of CST nodes into a stream of YAML Documents. - * - * ```ts - * import { Composer, Parser } from 'yaml' - * - * const src: string = ... 
- * const tokens = new Parser().parse(src) - * const docs = new Composer().compose(tokens) - * ``` - */ -export declare class Composer { - private directives; - private doc; - private options; - private atDirectives; - private prelude; - private errors; - private warnings; - constructor(options?: ParseOptions & DocumentOptions & SchemaOptions); - private onError; - private decorate; - /** - * Current stream status information. - * - * Mostly useful at the end of input for an empty stream. - */ - streamInfo(): { - comment: string; - directives: Directives; - errors: YAMLParseError[]; - warnings: YAMLWarning[]; - }; - /** - * Compose tokens into documents. - * - * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. - * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. - */ - compose(tokens: Iterable, forceDoc?: boolean, endOffset?: number): Generator, void, unknown>; - /** Advance the composer by one CST token. */ - next(token: Token): Generator, void, unknown>; - /** - * Call at end of input to yield any remaining document. - * - * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. - * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. - */ - end(forceDoc?: boolean, endOffset?: number): Generator, void, unknown>; -} -export {}; diff --git a/bin/node_modules/yaml/dist/compose/composer.js b/bin/node_modules/yaml/dist/compose/composer.js deleted file mode 100644 index 0b89117..0000000 --- a/bin/node_modules/yaml/dist/compose/composer.js +++ /dev/null @@ -1,221 +0,0 @@ -'use strict'; - -var directives = require('../doc/directives.js'); -var Document = require('../doc/Document.js'); -var errors = require('../errors.js'); -var identity = require('../nodes/identity.js'); -var composeDoc = require('./compose-doc.js'); -var resolveEnd = require('./resolve-end.js'); - -function getErrorPos(src) { - if (typeof src === 'number') - return [src, src + 1]; - if (Array.isArray(src)) - return src.length === 2 ? src : [src[0], src[1]]; - const { offset, source } = src; - return [offset, offset + (typeof source === 'string' ? source.length : 1)]; -} -function parsePrelude(prelude) { - let comment = ''; - let atComment = false; - let afterEmptyLine = false; - for (let i = 0; i < prelude.length; ++i) { - const source = prelude[i]; - switch (source[0]) { - case '#': - comment += - (comment === '' ? '' : afterEmptyLine ? '\n\n' : '\n') + - (source.substring(1) || ' '); - atComment = true; - afterEmptyLine = false; - break; - case '%': - if (prelude[i + 1]?.[0] !== '#') - i += 1; - atComment = false; - break; - default: - // This may be wrong after doc-end, but in that case it doesn't matter - if (!atComment) - afterEmptyLine = true; - atComment = false; - } - } - return { comment, afterEmptyLine }; -} -/** - * Compose a stream of CST nodes into a stream of YAML Documents. - * - * ```ts - * import { Composer, Parser } from 'yaml' - * - * const src: string = ... 
- * const tokens = new Parser().parse(src) - * const docs = new Composer().compose(tokens) - * ``` - */ -class Composer { - constructor(options = {}) { - this.doc = null; - this.atDirectives = false; - this.prelude = []; - this.errors = []; - this.warnings = []; - this.onError = (source, code, message, warning) => { - const pos = getErrorPos(source); - if (warning) - this.warnings.push(new errors.YAMLWarning(pos, code, message)); - else - this.errors.push(new errors.YAMLParseError(pos, code, message)); - }; - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - this.directives = new directives.Directives({ version: options.version || '1.2' }); - this.options = options; - } - decorate(doc, afterDoc) { - const { comment, afterEmptyLine } = parsePrelude(this.prelude); - //console.log({ dc: doc.comment, prelude, comment }) - if (comment) { - const dc = doc.contents; - if (afterDoc) { - doc.comment = doc.comment ? `${doc.comment}\n${comment}` : comment; - } - else if (afterEmptyLine || doc.directives.docStart || !dc) { - doc.commentBefore = comment; - } - else if (identity.isCollection(dc) && !dc.flow && dc.items.length > 0) { - let it = dc.items[0]; - if (identity.isPair(it)) - it = it.key; - const cb = it.commentBefore; - it.commentBefore = cb ? `${comment}\n${cb}` : comment; - } - else { - const cb = dc.commentBefore; - dc.commentBefore = cb ? `${comment}\n${cb}` : comment; - } - } - if (afterDoc) { - Array.prototype.push.apply(doc.errors, this.errors); - Array.prototype.push.apply(doc.warnings, this.warnings); - } - else { - doc.errors = this.errors; - doc.warnings = this.warnings; - } - this.prelude = []; - this.errors = []; - this.warnings = []; - } - /** - * Current stream status information. - * - * Mostly useful at the end of input for an empty stream. - */ - streamInfo() { - return { - comment: parsePrelude(this.prelude).comment, - directives: this.directives, - errors: this.errors, - warnings: this.warnings - }; - } - /** - * Compose tokens into documents. - * - * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. - * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. - */ - *compose(tokens, forceDoc = false, endOffset = -1) { - for (const token of tokens) - yield* this.next(token); - yield* this.end(forceDoc, endOffset); - } - /** Advance the composer by one CST token. */ - *next(token) { - if (process.env.LOG_STREAM) - console.dir(token, { depth: null }); - switch (token.type) { - case 'directive': - this.directives.add(token.source, (offset, message, warning) => { - const pos = getErrorPos(token); - pos[0] += offset; - this.onError(pos, 'BAD_DIRECTIVE', message, warning); - }); - this.prelude.push(token.source); - this.atDirectives = true; - break; - case 'document': { - const doc = composeDoc.composeDoc(this.options, this.directives, token, this.onError); - if (this.atDirectives && !doc.directives.docStart) - this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line'); - this.decorate(doc, false); - if (this.doc) - yield this.doc; - this.doc = doc; - this.atDirectives = false; - break; - } - case 'byte-order-mark': - case 'space': - break; - case 'comment': - case 'newline': - this.prelude.push(token.source); - break; - case 'error': { - const msg = token.source - ? 
`${token.message}: ${JSON.stringify(token.source)}` - : token.message; - const error = new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg); - if (this.atDirectives || !this.doc) - this.errors.push(error); - else - this.doc.errors.push(error); - break; - } - case 'doc-end': { - if (!this.doc) { - const msg = 'Unexpected doc-end without preceding document'; - this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg)); - break; - } - this.doc.directives.docEnd = true; - const end = resolveEnd.resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError); - this.decorate(this.doc, true); - if (end.comment) { - const dc = this.doc.comment; - this.doc.comment = dc ? `${dc}\n${end.comment}` : end.comment; - } - this.doc.range[2] = end.offset; - break; - } - default: - this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`)); - } - } - /** - * Call at end of input to yield any remaining document. - * - * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. - * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. - */ - *end(forceDoc = false, endOffset = -1) { - if (this.doc) { - this.decorate(this.doc, true); - yield this.doc; - this.doc = null; - } - else if (forceDoc) { - const opts = Object.assign({ _directives: this.directives }, this.options); - const doc = new Document.Document(undefined, opts); - if (this.atDirectives) - this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line'); - doc.range = [0, endOffset, endOffset]; - this.decorate(doc, false); - yield doc; - } - } -} - -exports.Composer = Composer; diff --git a/bin/node_modules/yaml/dist/compose/resolve-block-map.d.ts b/bin/node_modules/yaml/dist/compose/resolve-block-map.d.ts deleted file mode 100644 index 58855c8..0000000 --- a/bin/node_modules/yaml/dist/compose/resolve-block-map.d.ts +++ /dev/null @@ -1,7 +0,0 @@ -import type { ParsedNode } from '../nodes/Node.js'; -import { YAMLMap } from '../nodes/YAMLMap.js'; -import type { BlockMap } from '../parse/cst.js'; -import { CollectionTag } from '../schema/types.js'; -import type { ComposeContext, ComposeNode } from './compose-node.js'; -import type { ComposeErrorHandler } from './composer.js'; -export declare function resolveBlockMap({ composeNode, composeEmptyNode }: ComposeNode, ctx: ComposeContext, bm: BlockMap, onError: ComposeErrorHandler, tag?: CollectionTag): YAMLMap.Parsed; diff --git a/bin/node_modules/yaml/dist/compose/resolve-block-map.js b/bin/node_modules/yaml/dist/compose/resolve-block-map.js deleted file mode 100644 index 8e2b18a..0000000 --- a/bin/node_modules/yaml/dist/compose/resolve-block-map.js +++ /dev/null @@ -1,113 +0,0 @@ -'use strict'; - -var Pair = require('../nodes/Pair.js'); -var YAMLMap = require('../nodes/YAMLMap.js'); -var resolveProps = require('./resolve-props.js'); -var utilContainsNewline = require('./util-contains-newline.js'); -var utilFlowIndentCheck = require('./util-flow-indent-check.js'); -var utilMapIncludes = require('./util-map-includes.js'); - -const startColMsg = 'All mapping items must start at the same column'; -function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError, tag) { - const NodeClass = tag?.nodeClass ?? 
YAMLMap.YAMLMap; - const map = new NodeClass(ctx.schema); - if (ctx.atRoot) - ctx.atRoot = false; - let offset = bm.offset; - let commentEnd = null; - for (const collItem of bm.items) { - const { start, key, sep, value } = collItem; - // key properties - const keyProps = resolveProps.resolveProps(start, { - indicator: 'explicit-key-ind', - next: key ?? sep?.[0], - offset, - onError, - startOnNewline: true - }); - const implicitKey = !keyProps.found; - if (implicitKey) { - if (key) { - if (key.type === 'block-seq') - onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key'); - else if ('indent' in key && key.indent !== bm.indent) - onError(offset, 'BAD_INDENT', startColMsg); - } - if (!keyProps.anchor && !keyProps.tag && !sep) { - commentEnd = keyProps.end; - if (keyProps.comment) { - if (map.comment) - map.comment += '\n' + keyProps.comment; - else - map.comment = keyProps.comment; - } - continue; - } - if (keyProps.hasNewlineAfterProp || utilContainsNewline.containsNewline(key)) { - onError(key ?? start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line'); - } - } - else if (keyProps.found?.indent !== bm.indent) { - onError(offset, 'BAD_INDENT', startColMsg); - } - // key value - const keyStart = keyProps.end; - const keyNode = key - ? composeNode(ctx, key, keyProps, onError) - : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError); - if (ctx.schema.compat) - utilFlowIndentCheck.flowIndentCheck(bm.indent, key, onError); - if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode)) - onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique'); - // value properties - const valueProps = resolveProps.resolveProps(sep ?? [], { - indicator: 'map-value-ind', - next: value, - offset: keyNode.range[2], - onError, - startOnNewline: !key || key.type === 'block-scalar' - }); - offset = valueProps.end; - if (valueProps.found) { - if (implicitKey) { - if (value?.type === 'block-map' && !valueProps.hasNewline) - onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings'); - if (ctx.options.strict && - keyProps.start < valueProps.found.offset - 1024) - onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key'); - } - // value value - const valueNode = value - ? composeNode(ctx, value, valueProps, onError) - : composeEmptyNode(ctx, offset, sep, null, valueProps, onError); - if (ctx.schema.compat) - utilFlowIndentCheck.flowIndentCheck(bm.indent, value, onError); - offset = valueNode.range[2]; - const pair = new Pair.Pair(keyNode, valueNode); - if (ctx.options.keepSourceTokens) - pair.srcToken = collItem; - map.items.push(pair); - } - else { - // key with no value - if (implicitKey) - onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values'); - if (valueProps.comment) { - if (keyNode.comment) - keyNode.comment += '\n' + valueProps.comment; - else - keyNode.comment = valueProps.comment; - } - const pair = new Pair.Pair(keyNode); - if (ctx.options.keepSourceTokens) - pair.srcToken = collItem; - map.items.push(pair); - } - } - if (commentEnd && commentEnd < offset) - onError(commentEnd, 'IMPOSSIBLE', 'Map comment with trailing content'); - map.range = [bm.offset, offset, commentEnd ?? 
offset]; - return map; -} - -exports.resolveBlockMap = resolveBlockMap; diff --git a/bin/node_modules/yaml/dist/compose/resolve-block-scalar.d.ts b/bin/node_modules/yaml/dist/compose/resolve-block-scalar.d.ts deleted file mode 100644 index 4855b19..0000000 --- a/bin/node_modules/yaml/dist/compose/resolve-block-scalar.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { Range } from '../nodes/Node.js'; -import { Scalar } from '../nodes/Scalar.js'; -import type { BlockScalar } from '../parse/cst.js'; -import type { ComposeErrorHandler } from './composer.js'; -export declare function resolveBlockScalar(scalar: BlockScalar, strict: boolean, onError: ComposeErrorHandler): { - value: string; - type: Scalar.BLOCK_FOLDED | Scalar.BLOCK_LITERAL | null; - comment: string; - range: Range; -}; diff --git a/bin/node_modules/yaml/dist/compose/resolve-block-scalar.js b/bin/node_modules/yaml/dist/compose/resolve-block-scalar.js deleted file mode 100644 index ba89f97..0000000 --- a/bin/node_modules/yaml/dist/compose/resolve-block-scalar.js +++ /dev/null @@ -1,196 +0,0 @@ -'use strict'; - -var Scalar = require('../nodes/Scalar.js'); - -function resolveBlockScalar(scalar, strict, onError) { - const start = scalar.offset; - const header = parseBlockScalarHeader(scalar, strict, onError); - if (!header) - return { value: '', type: null, comment: '', range: [start, start, start] }; - const type = header.mode === '>' ? Scalar.Scalar.BLOCK_FOLDED : Scalar.Scalar.BLOCK_LITERAL; - const lines = scalar.source ? splitLines(scalar.source) : []; - // determine the end of content & start of chomping - let chompStart = lines.length; - for (let i = lines.length - 1; i >= 0; --i) { - const content = lines[i][1]; - if (content === '' || content === '\r') - chompStart = i; - else - break; - } - // shortcut for empty contents - if (chompStart === 0) { - const value = header.chomp === '+' && lines.length > 0 - ? 
'\n'.repeat(Math.max(1, lines.length - 1)) - : ''; - let end = start + header.length; - if (scalar.source) - end += scalar.source.length; - return { value, type, comment: header.comment, range: [start, end, end] }; - } - // find the indentation level to trim from start - let trimIndent = scalar.indent + header.indent; - let offset = scalar.offset + header.length; - let contentStart = 0; - for (let i = 0; i < chompStart; ++i) { - const [indent, content] = lines[i]; - if (content === '' || content === '\r') { - if (header.indent === 0 && indent.length > trimIndent) - trimIndent = indent.length; - } - else { - if (indent.length < trimIndent) { - const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator'; - onError(offset + indent.length, 'MISSING_CHAR', message); - } - if (header.indent === 0) - trimIndent = indent.length; - contentStart = i; - break; - } - offset += indent.length + content.length + 1; - } - // include trailing more-indented empty lines in content - for (let i = lines.length - 1; i >= chompStart; --i) { - if (lines[i][0].length > trimIndent) - chompStart = i + 1; - } - let value = ''; - let sep = ''; - let prevMoreIndented = false; - // leading whitespace is kept intact - for (let i = 0; i < contentStart; ++i) - value += lines[i][0].slice(trimIndent) + '\n'; - for (let i = contentStart; i < chompStart; ++i) { - let [indent, content] = lines[i]; - offset += indent.length + content.length + 1; - const crlf = content[content.length - 1] === '\r'; - if (crlf) - content = content.slice(0, -1); - /* istanbul ignore if already caught in lexer */ - if (content && indent.length < trimIndent) { - const src = header.indent - ? 'explicit indentation indicator' - : 'first line'; - const message = `Block scalar lines must not be less indented than their ${src}`; - onError(offset - content.length - (crlf ? 
2 : 1), 'BAD_INDENT', message); - indent = ''; - } - if (type === Scalar.Scalar.BLOCK_LITERAL) { - value += sep + indent.slice(trimIndent) + content; - sep = '\n'; - } - else if (indent.length > trimIndent || content[0] === '\t') { - // more-indented content within a folded block - if (sep === ' ') - sep = '\n'; - else if (!prevMoreIndented && sep === '\n') - sep = '\n\n'; - value += sep + indent.slice(trimIndent) + content; - sep = '\n'; - prevMoreIndented = true; - } - else if (content === '') { - // empty line - if (sep === '\n') - value += '\n'; - else - sep = '\n'; - } - else { - value += sep + content; - sep = ' '; - prevMoreIndented = false; - } - } - switch (header.chomp) { - case '-': - break; - case '+': - for (let i = chompStart; i < lines.length; ++i) - value += '\n' + lines[i][0].slice(trimIndent); - if (value[value.length - 1] !== '\n') - value += '\n'; - break; - default: - value += '\n'; - } - const end = start + header.length + scalar.source.length; - return { value, type, comment: header.comment, range: [start, end, end] }; -} -function parseBlockScalarHeader({ offset, props }, strict, onError) { - /* istanbul ignore if should not happen */ - if (props[0].type !== 'block-scalar-header') { - onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found'); - return null; - } - const { source } = props[0]; - const mode = source[0]; - let indent = 0; - let chomp = ''; - let error = -1; - for (let i = 1; i < source.length; ++i) { - const ch = source[i]; - if (!chomp && (ch === '-' || ch === '+')) - chomp = ch; - else { - const n = Number(ch); - if (!indent && n) - indent = n; - else if (error === -1) - error = offset + i; - } - } - if (error !== -1) - onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`); - let hasSpace = false; - let comment = ''; - let length = source.length; - for (let i = 1; i < props.length; ++i) { - const token = props[i]; - switch (token.type) { - case 'space': - hasSpace = true; - // fallthrough - case 'newline': - length += token.source.length; - break; - case 'comment': - if (strict && !hasSpace) { - const message = 'Comments must be separated from other tokens by white space characters'; - onError(token, 'MISSING_CHAR', message); - } - length += token.source.length; - comment = token.source.substring(1); - break; - case 'error': - onError(token, 'UNEXPECTED_TOKEN', token.message); - length += token.source.length; - break; - /* istanbul ignore next should not happen */ - default: { - const message = `Unexpected token in block scalar header: ${token.type}`; - onError(token, 'UNEXPECTED_TOKEN', message); - const ts = token.source; - if (ts && typeof ts === 'string') - length += ts.length; - } - } - } - return { mode, indent, chomp, comment, length }; -} -/** @returns Array of lines split up as `[indent, content]` */ -function splitLines(source) { - const split = source.split(/\n( *)/); - const first = split[0]; - const m = first.match(/^( *)/); - const line0 = m?.[1] - ? 
[m[1], first.slice(m[1].length)] - : ['', first]; - const lines = [line0]; - for (let i = 1; i < split.length; i += 2) - lines.push([split[i], split[i + 1]]); - return lines; -} - -exports.resolveBlockScalar = resolveBlockScalar; diff --git a/bin/node_modules/yaml/dist/compose/resolve-block-seq.d.ts b/bin/node_modules/yaml/dist/compose/resolve-block-seq.d.ts deleted file mode 100644 index 3a15f52..0000000 --- a/bin/node_modules/yaml/dist/compose/resolve-block-seq.d.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { YAMLSeq } from '../nodes/YAMLSeq.js'; -import type { BlockSequence } from '../parse/cst.js'; -import { CollectionTag } from '../schema/types.js'; -import type { ComposeContext, ComposeNode } from './compose-node.js'; -import type { ComposeErrorHandler } from './composer.js'; -export declare function resolveBlockSeq({ composeNode, composeEmptyNode }: ComposeNode, ctx: ComposeContext, bs: BlockSequence, onError: ComposeErrorHandler, tag?: CollectionTag): YAMLSeq.Parsed; diff --git a/bin/node_modules/yaml/dist/compose/resolve-block-seq.js b/bin/node_modules/yaml/dist/compose/resolve-block-seq.js deleted file mode 100644 index 9bb26db..0000000 --- a/bin/node_modules/yaml/dist/compose/resolve-block-seq.js +++ /dev/null @@ -1,48 +0,0 @@ -'use strict'; - -var YAMLSeq = require('../nodes/YAMLSeq.js'); -var resolveProps = require('./resolve-props.js'); -var utilFlowIndentCheck = require('./util-flow-indent-check.js'); - -function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError, tag) { - const NodeClass = tag?.nodeClass ?? YAMLSeq.YAMLSeq; - const seq = new NodeClass(ctx.schema); - if (ctx.atRoot) - ctx.atRoot = false; - let offset = bs.offset; - let commentEnd = null; - for (const { start, value } of bs.items) { - const props = resolveProps.resolveProps(start, { - indicator: 'seq-item-ind', - next: value, - offset, - onError, - startOnNewline: true - }); - if (!props.found) { - if (props.anchor || props.tag || value) { - if (value && value.type === 'block-seq') - onError(props.end, 'BAD_INDENT', 'All sequence items must start at the same column'); - else - onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator'); - } - else { - commentEnd = props.end; - if (props.comment) - seq.comment = props.comment; - continue; - } - } - const node = value - ? composeNode(ctx, value, props, onError) - : composeEmptyNode(ctx, props.end, start, null, props, onError); - if (ctx.schema.compat) - utilFlowIndentCheck.flowIndentCheck(bs.indent, value, onError); - offset = node.range[2]; - seq.items.push(node); - } - seq.range = [bs.offset, offset, commentEnd ?? 
offset]; - return seq; -} - -exports.resolveBlockSeq = resolveBlockSeq; diff --git a/bin/node_modules/yaml/dist/compose/resolve-end.d.ts b/bin/node_modules/yaml/dist/compose/resolve-end.d.ts deleted file mode 100644 index bb2d0b8..0000000 --- a/bin/node_modules/yaml/dist/compose/resolve-end.d.ts +++ /dev/null @@ -1,6 +0,0 @@ -import type { SourceToken } from '../parse/cst.js'; -import type { ComposeErrorHandler } from './composer.js'; -export declare function resolveEnd(end: SourceToken[] | undefined, offset: number, reqSpace: boolean, onError: ComposeErrorHandler): { - comment: string; - offset: number; -}; diff --git a/bin/node_modules/yaml/dist/compose/resolve-end.js b/bin/node_modules/yaml/dist/compose/resolve-end.js deleted file mode 100644 index 3a58347..0000000 --- a/bin/node_modules/yaml/dist/compose/resolve-end.js +++ /dev/null @@ -1,39 +0,0 @@ -'use strict'; - -function resolveEnd(end, offset, reqSpace, onError) { - let comment = ''; - if (end) { - let hasSpace = false; - let sep = ''; - for (const token of end) { - const { source, type } = token; - switch (type) { - case 'space': - hasSpace = true; - break; - case 'comment': { - if (reqSpace && !hasSpace) - onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters'); - const cb = source.substring(1) || ' '; - if (!comment) - comment = cb; - else - comment += sep + cb; - sep = ''; - break; - } - case 'newline': - if (comment) - sep += source; - hasSpace = true; - break; - default: - onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`); - } - offset += source.length; - } - } - return { comment, offset }; -} - -exports.resolveEnd = resolveEnd; diff --git a/bin/node_modules/yaml/dist/compose/resolve-flow-collection.d.ts b/bin/node_modules/yaml/dist/compose/resolve-flow-collection.d.ts deleted file mode 100644 index 8db5985..0000000 --- a/bin/node_modules/yaml/dist/compose/resolve-flow-collection.d.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { YAMLMap } from '../nodes/YAMLMap.js'; -import { YAMLSeq } from '../nodes/YAMLSeq.js'; -import type { FlowCollection } from '../parse/cst.js'; -import { CollectionTag } from '../schema/types.js'; -import type { ComposeContext, ComposeNode } from './compose-node.js'; -import type { ComposeErrorHandler } from './composer.js'; -export declare function resolveFlowCollection({ composeNode, composeEmptyNode }: ComposeNode, ctx: ComposeContext, fc: FlowCollection, onError: ComposeErrorHandler, tag?: CollectionTag): YAMLMap.Parsed | YAMLSeq.Parsed; diff --git a/bin/node_modules/yaml/dist/compose/resolve-flow-collection.js b/bin/node_modules/yaml/dist/compose/resolve-flow-collection.js deleted file mode 100644 index ec29d08..0000000 --- a/bin/node_modules/yaml/dist/compose/resolve-flow-collection.js +++ /dev/null @@ -1,201 +0,0 @@ -'use strict'; - -var identity = require('../nodes/identity.js'); -var Pair = require('../nodes/Pair.js'); -var YAMLMap = require('../nodes/YAMLMap.js'); -var YAMLSeq = require('../nodes/YAMLSeq.js'); -var resolveEnd = require('./resolve-end.js'); -var resolveProps = require('./resolve-props.js'); -var utilContainsNewline = require('./util-contains-newline.js'); -var utilMapIncludes = require('./util-map-includes.js'); - -const blockMsg = 'Block collections are not allowed within flow collections'; -const isBlock = (token) => token && (token.type === 'block-map' || token.type === 'block-seq'); -function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError, tag) { - const isMap = fc.start.source === 
'{'; - const fcName = isMap ? 'flow map' : 'flow sequence'; - const NodeClass = (tag?.nodeClass ?? (isMap ? YAMLMap.YAMLMap : YAMLSeq.YAMLSeq)); - const coll = new NodeClass(ctx.schema); - coll.flow = true; - const atRoot = ctx.atRoot; - if (atRoot) - ctx.atRoot = false; - let offset = fc.offset + fc.start.source.length; - for (let i = 0; i < fc.items.length; ++i) { - const collItem = fc.items[i]; - const { start, key, sep, value } = collItem; - const props = resolveProps.resolveProps(start, { - flow: fcName, - indicator: 'explicit-key-ind', - next: key ?? sep?.[0], - offset, - onError, - startOnNewline: false - }); - if (!props.found) { - if (!props.anchor && !props.tag && !sep && !value) { - if (i === 0 && props.comma) - onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`); - else if (i < fc.items.length - 1) - onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`); - if (props.comment) { - if (coll.comment) - coll.comment += '\n' + props.comment; - else - coll.comment = props.comment; - } - offset = props.end; - continue; - } - if (!isMap && ctx.options.strict && utilContainsNewline.containsNewline(key)) - onError(key, // checked by containsNewline() - 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line'); - } - if (i === 0) { - if (props.comma) - onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`); - } - else { - if (!props.comma) - onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`); - if (props.comment) { - let prevItemComment = ''; - loop: for (const st of start) { - switch (st.type) { - case 'comma': - case 'space': - break; - case 'comment': - prevItemComment = st.source.substring(1); - break loop; - default: - break loop; - } - } - if (prevItemComment) { - let prev = coll.items[coll.items.length - 1]; - if (identity.isPair(prev)) - prev = prev.value ?? prev.key; - if (prev.comment) - prev.comment += '\n' + prevItemComment; - else - prev.comment = prevItemComment; - props.comment = props.comment.substring(prevItemComment.length + 1); - } - } - } - if (!isMap && !sep && !props.found) { - // item is a value in a seq - // → key & sep are empty, start does not include ? or : - const valueNode = value - ? composeNode(ctx, value, props, onError) - : composeEmptyNode(ctx, props.end, sep, null, props, onError); - coll.items.push(valueNode); - offset = valueNode.range[2]; - if (isBlock(value)) - onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg); - } - else { - // item is a key+value pair - // key value - const keyStart = props.end; - const keyNode = key - ? composeNode(ctx, key, props, onError) - : composeEmptyNode(ctx, keyStart, start, null, props, onError); - if (isBlock(key)) - onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg); - // value properties - const valueProps = resolveProps.resolveProps(sep ?? 
[], { - flow: fcName, - indicator: 'map-value-ind', - next: value, - offset: keyNode.range[2], - onError, - startOnNewline: false - }); - if (valueProps.found) { - if (!isMap && !props.found && ctx.options.strict) { - if (sep) - for (const st of sep) { - if (st === valueProps.found) - break; - if (st.type === 'newline') { - onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line'); - break; - } - } - if (props.start < valueProps.found.offset - 1024) - onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key'); - } - } - else if (value) { - if ('source' in value && value.source && value.source[0] === ':') - onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`); - else - onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`); - } - // value value - const valueNode = value - ? composeNode(ctx, value, valueProps, onError) - : valueProps.found - ? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError) - : null; - if (valueNode) { - if (isBlock(value)) - onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg); - } - else if (valueProps.comment) { - if (keyNode.comment) - keyNode.comment += '\n' + valueProps.comment; - else - keyNode.comment = valueProps.comment; - } - const pair = new Pair.Pair(keyNode, valueNode); - if (ctx.options.keepSourceTokens) - pair.srcToken = collItem; - if (isMap) { - const map = coll; - if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode)) - onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique'); - map.items.push(pair); - } - else { - const map = new YAMLMap.YAMLMap(ctx.schema); - map.flow = true; - map.items.push(pair); - coll.items.push(map); - } - offset = valueNode ? valueNode.range[2] : valueProps.end; - } - } - const expectedEnd = isMap ? '}' : ']'; - const [ce, ...ee] = fc.end; - let cePos = offset; - if (ce && ce.source === expectedEnd) - cePos = ce.offset + ce.source.length; - else { - const name = fcName[0].toUpperCase() + fcName.substring(1); - const msg = atRoot - ? `${name} must end with a ${expectedEnd}` - : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`; - onError(offset, atRoot ? 
'MISSING_CHAR' : 'BAD_INDENT', msg); - if (ce && ce.source.length !== 1) - ee.unshift(ce); - } - if (ee.length > 0) { - const end = resolveEnd.resolveEnd(ee, cePos, ctx.options.strict, onError); - if (end.comment) { - if (coll.comment) - coll.comment += '\n' + end.comment; - else - coll.comment = end.comment; - } - coll.range = [fc.offset, cePos, end.offset]; - } - else { - coll.range = [fc.offset, cePos, cePos]; - } - return coll; -} - -exports.resolveFlowCollection = resolveFlowCollection; diff --git a/bin/node_modules/yaml/dist/compose/resolve-flow-scalar.d.ts b/bin/node_modules/yaml/dist/compose/resolve-flow-scalar.d.ts deleted file mode 100644 index 0c9204d..0000000 --- a/bin/node_modules/yaml/dist/compose/resolve-flow-scalar.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { Range } from '../nodes/Node.js'; -import { Scalar } from '../nodes/Scalar.js'; -import type { FlowScalar } from '../parse/cst.js'; -import type { ComposeErrorHandler } from './composer.js'; -export declare function resolveFlowScalar(scalar: FlowScalar, strict: boolean, onError: ComposeErrorHandler): { - value: string; - type: Scalar.PLAIN | Scalar.QUOTE_DOUBLE | Scalar.QUOTE_SINGLE | null; - comment: string; - range: Range; -}; diff --git a/bin/node_modules/yaml/dist/compose/resolve-flow-scalar.js b/bin/node_modules/yaml/dist/compose/resolve-flow-scalar.js deleted file mode 100644 index 7976f2f..0000000 --- a/bin/node_modules/yaml/dist/compose/resolve-flow-scalar.js +++ /dev/null @@ -1,225 +0,0 @@ -'use strict'; - -var Scalar = require('../nodes/Scalar.js'); -var resolveEnd = require('./resolve-end.js'); - -function resolveFlowScalar(scalar, strict, onError) { - const { offset, type, source, end } = scalar; - let _type; - let value; - const _onError = (rel, code, msg) => onError(offset + rel, code, msg); - switch (type) { - case 'scalar': - _type = Scalar.Scalar.PLAIN; - value = plainValue(source, _onError); - break; - case 'single-quoted-scalar': - _type = Scalar.Scalar.QUOTE_SINGLE; - value = singleQuotedValue(source, _onError); - break; - case 'double-quoted-scalar': - _type = Scalar.Scalar.QUOTE_DOUBLE; - value = doubleQuotedValue(source, _onError); - break; - /* istanbul ignore next should not happen */ - default: - onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`); - return { - value: '', - type: null, - comment: '', - range: [offset, offset + source.length, offset + source.length] - }; - } - const valueEnd = offset + source.length; - const re = resolveEnd.resolveEnd(end, valueEnd, strict, onError); - return { - value, - type: _type, - comment: re.comment, - range: [offset, valueEnd, re.offset] - }; -} -function plainValue(source, onError) { - let badChar = ''; - switch (source[0]) { - /* istanbul ignore next should not happen */ - case '\t': - badChar = 'a tab character'; - break; - case ',': - badChar = 'flow indicator character ,'; - break; - case '%': - badChar = 'directive indicator character %'; - break; - case '|': - case '>': { - badChar = `block scalar indicator ${source[0]}`; - break; - } - case '@': - case '`': { - badChar = `reserved character ${source[0]}`; - break; - } - } - if (badChar) - onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`); - return foldLines(source); -} -function singleQuotedValue(source, onError) { - if (source[source.length - 1] !== "'" || source.length === 1) - onError(source.length, 'MISSING_CHAR', "Missing closing 'quote"); - return foldLines(source.slice(1, -1)).replace(/''/g, "'"); -} -function foldLines(source) 
{ - /** - * The negative lookbehind here and in the `re` RegExp is to - * prevent causing a polynomial search time in certain cases. - * - * The try-catch is for Safari, which doesn't support this yet: - * https://caniuse.com/js-regexp-lookbehind - */ - let first, line; - try { - first = new RegExp('(.*?)(? wsStart ? source.slice(wsStart, i + 1) : ch; - } - else { - res += ch; - } - } - if (source[source.length - 1] !== '"' || source.length === 1) - onError(source.length, 'MISSING_CHAR', 'Missing closing "quote'); - return res; -} -/** - * Fold a single newline into a space, multiple newlines to N - 1 newlines. - * Presumes `source[offset] === '\n'` - */ -function foldNewline(source, offset) { - let fold = ''; - let ch = source[offset + 1]; - while (ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r') { - if (ch === '\r' && source[offset + 2] !== '\n') - break; - if (ch === '\n') - fold += '\n'; - offset += 1; - ch = source[offset + 1]; - } - if (!fold) - fold = ' '; - return { fold, offset }; -} -const escapeCodes = { - '0': '\0', // null character - a: '\x07', // bell character - b: '\b', // backspace - e: '\x1b', // escape character - f: '\f', // form feed - n: '\n', // line feed - r: '\r', // carriage return - t: '\t', // horizontal tab - v: '\v', // vertical tab - N: '\u0085', // Unicode next line - _: '\u00a0', // Unicode non-breaking space - L: '\u2028', // Unicode line separator - P: '\u2029', // Unicode paragraph separator - ' ': ' ', - '"': '"', - '/': '/', - '\\': '\\', - '\t': '\t' -}; -function parseCharCode(source, offset, length, onError) { - const cc = source.substr(offset, length); - const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc); - const code = ok ? parseInt(cc, 16) : NaN; - if (isNaN(code)) { - const raw = source.substr(offset - 2, length + 2); - onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`); - return raw; - } - return String.fromCodePoint(code); -} - -exports.resolveFlowScalar = resolveFlowScalar; diff --git a/bin/node_modules/yaml/dist/compose/resolve-props.d.ts b/bin/node_modules/yaml/dist/compose/resolve-props.d.ts deleted file mode 100644 index fba44cf..0000000 --- a/bin/node_modules/yaml/dist/compose/resolve-props.d.ts +++ /dev/null @@ -1,22 +0,0 @@ -import type { SourceToken, Token } from '../parse/cst.js'; -import type { ComposeErrorHandler } from './composer.js'; -export interface ResolvePropsArg { - flow?: 'flow map' | 'flow sequence'; - indicator: 'doc-start' | 'explicit-key-ind' | 'map-value-ind' | 'seq-item-ind'; - next: Token | null | undefined; - offset: number; - onError: ComposeErrorHandler; - startOnNewline: boolean; -} -export declare function resolveProps(tokens: SourceToken[], { flow, indicator, next, offset, onError, startOnNewline }: ResolvePropsArg): { - comma: SourceToken | null; - found: SourceToken | null; - spaceBefore: boolean; - comment: string; - hasNewline: boolean; - hasNewlineAfterProp: boolean; - anchor: SourceToken | null; - tag: SourceToken | null; - end: number; - start: number; -}; diff --git a/bin/node_modules/yaml/dist/compose/resolve-props.js b/bin/node_modules/yaml/dist/compose/resolve-props.js deleted file mode 100644 index bc2ef22..0000000 --- a/bin/node_modules/yaml/dist/compose/resolve-props.js +++ /dev/null @@ -1,136 +0,0 @@ -'use strict'; - -function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnNewline }) { - let spaceBefore = false; - let atNewline = startOnNewline; - let hasSpace = startOnNewline; - let comment = ''; - let commentSep = ''; - let 
hasNewline = false; - let hasNewlineAfterProp = false; - let reqSpace = false; - let anchor = null; - let tag = null; - let comma = null; - let found = null; - let start = null; - for (const token of tokens) { - if (reqSpace) { - if (token.type !== 'space' && - token.type !== 'newline' && - token.type !== 'comma') - onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space'); - reqSpace = false; - } - switch (token.type) { - case 'space': - // At the doc level, tabs at line start may be parsed - // as leading white space rather than indentation. - // In a flow collection, only the parser handles indent. - if (!flow && - atNewline && - indicator !== 'doc-start' && - token.source[0] === '\t') - onError(token, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation'); - hasSpace = true; - break; - case 'comment': { - if (!hasSpace) - onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters'); - const cb = token.source.substring(1) || ' '; - if (!comment) - comment = cb; - else - comment += commentSep + cb; - commentSep = ''; - atNewline = false; - break; - } - case 'newline': - if (atNewline) { - if (comment) - comment += token.source; - else - spaceBefore = true; - } - else - commentSep += token.source; - atNewline = true; - hasNewline = true; - if (anchor || tag) - hasNewlineAfterProp = true; - hasSpace = true; - break; - case 'anchor': - if (anchor) - onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor'); - if (token.source.endsWith(':')) - onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true); - anchor = token; - if (start === null) - start = token.offset; - atNewline = false; - hasSpace = false; - reqSpace = true; - break; - case 'tag': { - if (tag) - onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag'); - tag = token; - if (start === null) - start = token.offset; - atNewline = false; - hasSpace = false; - reqSpace = true; - break; - } - case indicator: - // Could here handle preceding comments differently - if (anchor || tag) - onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`); - if (found) - onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`); - found = token; - atNewline = false; - hasSpace = false; - break; - case 'comma': - if (flow) { - if (comma) - onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`); - comma = token; - atNewline = false; - hasSpace = false; - break; - } - // else fallthrough - default: - onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`); - atNewline = false; - hasSpace = false; - } - } - const last = tokens[tokens.length - 1]; - const end = last ? last.offset + last.source.length : offset; - if (reqSpace && - next && - next.type !== 'space' && - next.type !== 'newline' && - next.type !== 'comma' && - (next.type !== 'scalar' || next.source !== '')) - onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space'); - return { - comma, - found, - spaceBefore, - comment, - hasNewline, - hasNewlineAfterProp, - anchor, - tag, - end, - start: start ?? 
end - }; -} - -exports.resolveProps = resolveProps; diff --git a/bin/node_modules/yaml/dist/compose/util-contains-newline.d.ts b/bin/node_modules/yaml/dist/compose/util-contains-newline.d.ts deleted file mode 100644 index 8155be0..0000000 --- a/bin/node_modules/yaml/dist/compose/util-contains-newline.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import type { Token } from '../parse/cst.js'; -export declare function containsNewline(key: Token | null | undefined): boolean | null; diff --git a/bin/node_modules/yaml/dist/compose/util-contains-newline.js b/bin/node_modules/yaml/dist/compose/util-contains-newline.js deleted file mode 100644 index e7aa82d..0000000 --- a/bin/node_modules/yaml/dist/compose/util-contains-newline.js +++ /dev/null @@ -1,36 +0,0 @@ -'use strict'; - -function containsNewline(key) { - if (!key) - return null; - switch (key.type) { - case 'alias': - case 'scalar': - case 'double-quoted-scalar': - case 'single-quoted-scalar': - if (key.source.includes('\n')) - return true; - if (key.end) - for (const st of key.end) - if (st.type === 'newline') - return true; - return false; - case 'flow-collection': - for (const it of key.items) { - for (const st of it.start) - if (st.type === 'newline') - return true; - if (it.sep) - for (const st of it.sep) - if (st.type === 'newline') - return true; - if (containsNewline(it.key) || containsNewline(it.value)) - return true; - } - return false; - default: - return true; - } -} - -exports.containsNewline = containsNewline; diff --git a/bin/node_modules/yaml/dist/compose/util-empty-scalar-position.d.ts b/bin/node_modules/yaml/dist/compose/util-empty-scalar-position.d.ts deleted file mode 100644 index 90499b8..0000000 --- a/bin/node_modules/yaml/dist/compose/util-empty-scalar-position.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import type { Token } from '../parse/cst.js'; -export declare function emptyScalarPosition(offset: number, before: Token[] | undefined, pos: number | null): number; diff --git a/bin/node_modules/yaml/dist/compose/util-empty-scalar-position.js b/bin/node_modules/yaml/dist/compose/util-empty-scalar-position.js deleted file mode 100644 index b2cd849..0000000 --- a/bin/node_modules/yaml/dist/compose/util-empty-scalar-position.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict'; - -function emptyScalarPosition(offset, before, pos) { - if (before) { - if (pos === null) - pos = before.length; - for (let i = pos - 1; i >= 0; --i) { - let st = before[i]; - switch (st.type) { - case 'space': - case 'comment': - case 'newline': - offset -= st.source.length; - continue; - } - // Technically, an empty scalar is immediately after the last non-empty - // node, but it's more useful to place it after any whitespace. 
- st = before[++i]; - while (st?.type === 'space') { - offset += st.source.length; - st = before[++i]; - } - break; - } - } - return offset; -} - -exports.emptyScalarPosition = emptyScalarPosition; diff --git a/bin/node_modules/yaml/dist/compose/util-flow-indent-check.d.ts b/bin/node_modules/yaml/dist/compose/util-flow-indent-check.d.ts deleted file mode 100644 index 64ed1fc..0000000 --- a/bin/node_modules/yaml/dist/compose/util-flow-indent-check.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -import { Token } from '../parse/cst'; -import { ComposeErrorHandler } from './composer'; -export declare function flowIndentCheck(indent: number, fc: Token | null | undefined, onError: ComposeErrorHandler): void; diff --git a/bin/node_modules/yaml/dist/compose/util-flow-indent-check.js b/bin/node_modules/yaml/dist/compose/util-flow-indent-check.js deleted file mode 100644 index 1e6b06f..0000000 --- a/bin/node_modules/yaml/dist/compose/util-flow-indent-check.js +++ /dev/null @@ -1,17 +0,0 @@ -'use strict'; - -var utilContainsNewline = require('./util-contains-newline.js'); - -function flowIndentCheck(indent, fc, onError) { - if (fc?.type === 'flow-collection') { - const end = fc.end[0]; - if (end.indent === indent && - (end.source === ']' || end.source === '}') && - utilContainsNewline.containsNewline(fc)) { - const msg = 'Flow end indicator should be more indented than parent'; - onError(end, 'BAD_INDENT', msg, true); - } - } -} - -exports.flowIndentCheck = flowIndentCheck; diff --git a/bin/node_modules/yaml/dist/compose/util-map-includes.d.ts b/bin/node_modules/yaml/dist/compose/util-map-includes.d.ts deleted file mode 100644 index fae2276..0000000 --- a/bin/node_modules/yaml/dist/compose/util-map-includes.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { ParsedNode } from '../nodes/Node.js'; -import type { Pair } from '../nodes/Pair.js'; -import type { ComposeContext } from './compose-node.js'; -export declare function mapIncludes(ctx: ComposeContext, items: Pair[], search: ParsedNode): boolean; diff --git a/bin/node_modules/yaml/dist/compose/util-map-includes.js b/bin/node_modules/yaml/dist/compose/util-map-includes.js deleted file mode 100644 index 8a8d35c..0000000 --- a/bin/node_modules/yaml/dist/compose/util-map-includes.js +++ /dev/null @@ -1,19 +0,0 @@ -'use strict'; - -var identity = require('../nodes/identity.js'); - -function mapIncludes(ctx, items, search) { - const { uniqueKeys } = ctx.options; - if (uniqueKeys === false) - return false; - const isEqual = typeof uniqueKeys === 'function' - ? 
uniqueKeys - : (a, b) => a === b || - (identity.isScalar(a) && - identity.isScalar(b) && - a.value === b.value && - !(a.value === '<<' && ctx.schema.merge)); - return items.some(pair => isEqual(pair.key, search)); -} - -exports.mapIncludes = mapIncludes; diff --git a/bin/node_modules/yaml/dist/doc/Document.d.ts b/bin/node_modules/yaml/dist/doc/Document.d.ts deleted file mode 100644 index 431b907..0000000 --- a/bin/node_modules/yaml/dist/doc/Document.d.ts +++ /dev/null @@ -1,141 +0,0 @@ -import type { YAMLError, YAMLWarning } from '../errors.js'; -import { Alias } from '../nodes/Alias.js'; -import { NODE_TYPE } from '../nodes/identity.js'; -import type { Node, NodeType, ParsedNode, Range } from '../nodes/Node.js'; -import { Pair } from '../nodes/Pair.js'; -import type { Scalar } from '../nodes/Scalar.js'; -import type { YAMLMap } from '../nodes/YAMLMap.js'; -import type { YAMLSeq } from '../nodes/YAMLSeq.js'; -import type { CreateNodeOptions, DocumentOptions, ParseOptions, SchemaOptions, ToJSOptions, ToStringOptions } from '../options.js'; -import { Schema } from '../schema/Schema.js'; -import { Directives } from './directives.js'; -export type Replacer = any[] | ((key: any, value: any) => unknown); -export declare namespace Document { - /** @ts-ignore The typing of directives fails in TS <= 4.2 */ - interface Parsed extends Document { - directives: Directives; - range: Range; - } -} -export declare class Document { - readonly [NODE_TYPE]: symbol; - /** A comment before this Document */ - commentBefore: string | null; - /** A comment immediately after this Document */ - comment: string | null; - /** The document contents. */ - contents: Strict extends true ? Contents | null : Contents; - directives: Strict extends true ? Directives | undefined : Directives; - /** Errors encountered during parsing. */ - errors: YAMLError[]; - options: Required>; - /** - * The `[start, value-end, node-end]` character offsets for the part of the - * source parsed into this document (undefined if not parsed). The `value-end` - * and `node-end` positions are themselves not included in their respective - * ranges. - */ - range?: Range; - /** The schema used with the document. Use `setSchema()` to change. */ - schema: Schema; - /** Warnings encountered during parsing. */ - warnings: YAMLWarning[]; - /** - * @param value - The initial value for the document, which will be wrapped - * in a Node container. - */ - constructor(value?: any, options?: DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions); - constructor(value: any, replacer: null | Replacer, options?: DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions); - /** - * Create a deep copy of this Document and its contents. - * - * Custom Node values that inherit from `Object` still refer to their original instances. - */ - clone(): Document; - /** Adds a value to the document. */ - add(value: any): void; - /** Adds a value to the document. */ - addIn(path: Iterable, value: unknown): void; - /** - * Create a new `Alias` node, ensuring that the target `node` has the required anchor. - * - * If `node` already has an anchor, `name` is ignored. - * Otherwise, the `node.anchor` value will be set to `name`, - * or if an anchor with that name is already present in the document, - * `name` will be used as a prefix for a new unique anchor. - * If `name` is undefined, the generated anchor will use 'a' as a prefix. - */ - createAlias(node: Strict extends true ? 
Scalar | YAMLMap | YAMLSeq : Node, name?: string): Alias; - /** - * Convert any value into a `Node` using the current schema, recursively - * turning objects into collections. - */ - createNode(value: T, options?: CreateNodeOptions): NodeType; - createNode(value: T, replacer: Replacer | CreateNodeOptions | null, options?: CreateNodeOptions): NodeType; - /** - * Convert a key and a value into a `Pair` using the current schema, - * recursively wrapping all values as `Scalar` or `Collection` nodes. - */ - createPair(key: unknown, value: unknown, options?: CreateNodeOptions): Pair; - /** - * Removes a value from the document. - * @returns `true` if the item was found and removed. - */ - delete(key: unknown): boolean; - /** - * Removes a value from the document. - * @returns `true` if the item was found and removed. - */ - deleteIn(path: Iterable | null): boolean; - /** - * Returns item at `key`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - get(key: unknown, keepScalar?: boolean): Strict extends true ? unknown : any; - /** - * Returns item at `path`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - getIn(path: Iterable | null, keepScalar?: boolean): Strict extends true ? unknown : any; - /** - * Checks if the document includes a value with the key `key`. - */ - has(key: unknown): boolean; - /** - * Checks if the document includes a value at `path`. - */ - hasIn(path: Iterable | null): boolean; - /** - * Sets a value in this document. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - set(key: any, value: unknown): void; - /** - * Sets a value in this document. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - setIn(path: Iterable | null, value: unknown): void; - /** - * Change the YAML version and schema used by the document. - * A `null` version disables support for directives, explicit tags, anchors, and aliases. - * It also requires the `schema` option to be given as a `Schema` instance value. - * - * Overrides all previously set schema options. - */ - setSchema(version: '1.1' | '1.2' | 'next' | null, options?: SchemaOptions): void; - /** A plain JavaScript representation of the document `contents`. */ - toJS(opt?: ToJSOptions & { - [ignored: string]: unknown; - }): any; - /** - * A JSON representation of the document `contents`. - * - * @param jsonArg Used by `JSON.stringify` to indicate the array index or - * property name. - */ - toJSON(jsonArg?: string | null, onAnchor?: ToJSOptions['onAnchor']): any; - /** A YAML representation of the document. 
*/ - toString(options?: ToStringOptions): string; -} diff --git a/bin/node_modules/yaml/dist/doc/Document.js b/bin/node_modules/yaml/dist/doc/Document.js deleted file mode 100644 index a0aa955..0000000 --- a/bin/node_modules/yaml/dist/doc/Document.js +++ /dev/null @@ -1,336 +0,0 @@ -'use strict'; - -var Alias = require('../nodes/Alias.js'); -var Collection = require('../nodes/Collection.js'); -var identity = require('../nodes/identity.js'); -var Pair = require('../nodes/Pair.js'); -var toJS = require('../nodes/toJS.js'); -var Schema = require('../schema/Schema.js'); -var stringifyDocument = require('../stringify/stringifyDocument.js'); -var anchors = require('./anchors.js'); -var applyReviver = require('./applyReviver.js'); -var createNode = require('./createNode.js'); -var directives = require('./directives.js'); - -class Document { - constructor(value, replacer, options) { - /** A comment before this Document */ - this.commentBefore = null; - /** A comment immediately after this Document */ - this.comment = null; - /** Errors encountered during parsing. */ - this.errors = []; - /** Warnings encountered during parsing. */ - this.warnings = []; - Object.defineProperty(this, identity.NODE_TYPE, { value: identity.DOC }); - let _replacer = null; - if (typeof replacer === 'function' || Array.isArray(replacer)) { - _replacer = replacer; - } - else if (options === undefined && replacer) { - options = replacer; - replacer = undefined; - } - const opt = Object.assign({ - intAsBigInt: false, - keepSourceTokens: false, - logLevel: 'warn', - prettyErrors: true, - strict: true, - uniqueKeys: true, - version: '1.2' - }, options); - this.options = opt; - let { version } = opt; - if (options?._directives) { - this.directives = options._directives.atDocument(); - if (this.directives.yaml.explicit) - version = this.directives.yaml.version; - } - else - this.directives = new directives.Directives({ version }); - this.setSchema(version, options); - // @ts-expect-error We can't really know that this matches Contents. - this.contents = - value === undefined ? null : this.createNode(value, _replacer, options); - } - /** - * Create a deep copy of this Document and its contents. - * - * Custom Node values that inherit from `Object` still refer to their original instances. - */ - clone() { - const copy = Object.create(Document.prototype, { - [identity.NODE_TYPE]: { value: identity.DOC } - }); - copy.commentBefore = this.commentBefore; - copy.comment = this.comment; - copy.errors = this.errors.slice(); - copy.warnings = this.warnings.slice(); - copy.options = Object.assign({}, this.options); - if (this.directives) - copy.directives = this.directives.clone(); - copy.schema = this.schema.clone(); - // @ts-expect-error We can't really know that this matches Contents. - copy.contents = identity.isNode(this.contents) - ? this.contents.clone(copy.schema) - : this.contents; - if (this.range) - copy.range = this.range.slice(); - return copy; - } - /** Adds a value to the document. */ - add(value) { - if (assertCollection(this.contents)) - this.contents.add(value); - } - /** Adds a value to the document. */ - addIn(path, value) { - if (assertCollection(this.contents)) - this.contents.addIn(path, value); - } - /** - * Create a new `Alias` node, ensuring that the target `node` has the required anchor. - * - * If `node` already has an anchor, `name` is ignored. 
- * Otherwise, the `node.anchor` value will be set to `name`, - * or if an anchor with that name is already present in the document, - * `name` will be used as a prefix for a new unique anchor. - * If `name` is undefined, the generated anchor will use 'a' as a prefix. - */ - createAlias(node, name) { - if (!node.anchor) { - const prev = anchors.anchorNames(this); - node.anchor = - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - !name || prev.has(name) ? anchors.findNewAnchor(name || 'a', prev) : name; - } - return new Alias.Alias(node.anchor); - } - createNode(value, replacer, options) { - let _replacer = undefined; - if (typeof replacer === 'function') { - value = replacer.call({ '': value }, '', value); - _replacer = replacer; - } - else if (Array.isArray(replacer)) { - const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number; - const asStr = replacer.filter(keyToStr).map(String); - if (asStr.length > 0) - replacer = replacer.concat(asStr); - _replacer = replacer; - } - else if (options === undefined && replacer) { - options = replacer; - replacer = undefined; - } - const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {}; - const { onAnchor, setAnchors, sourceObjects } = anchors.createNodeAnchors(this, - // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing - anchorPrefix || 'a'); - const ctx = { - aliasDuplicateObjects: aliasDuplicateObjects ?? true, - keepUndefined: keepUndefined ?? false, - onAnchor, - onTagObj, - replacer: _replacer, - schema: this.schema, - sourceObjects - }; - const node = createNode.createNode(value, tag, ctx); - if (flow && identity.isCollection(node)) - node.flow = true; - setAnchors(); - return node; - } - /** - * Convert a key and a value into a `Pair` using the current schema, - * recursively wrapping all values as `Scalar` or `Collection` nodes. - */ - createPair(key, value, options = {}) { - const k = this.createNode(key, null, options); - const v = this.createNode(value, null, options); - return new Pair.Pair(k, v); - } - /** - * Removes a value from the document. - * @returns `true` if the item was found and removed. - */ - delete(key) { - return assertCollection(this.contents) ? this.contents.delete(key) : false; - } - /** - * Removes a value from the document. - * @returns `true` if the item was found and removed. - */ - deleteIn(path) { - if (Collection.isEmptyPath(path)) { - if (this.contents == null) - return false; - // @ts-expect-error Presumed impossible if Strict extends false - this.contents = null; - return true; - } - return assertCollection(this.contents) - ? this.contents.deleteIn(path) - : false; - } - /** - * Returns item at `key`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - get(key, keepScalar) { - return identity.isCollection(this.contents) - ? this.contents.get(key, keepScalar) - : undefined; - } - /** - * Returns item at `path`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - getIn(path, keepScalar) { - if (Collection.isEmptyPath(path)) - return !keepScalar && identity.isScalar(this.contents) - ? this.contents.value - : this.contents; - return identity.isCollection(this.contents) - ? 
this.contents.getIn(path, keepScalar) - : undefined; - } - /** - * Checks if the document includes a value with the key `key`. - */ - has(key) { - return identity.isCollection(this.contents) ? this.contents.has(key) : false; - } - /** - * Checks if the document includes a value at `path`. - */ - hasIn(path) { - if (Collection.isEmptyPath(path)) - return this.contents !== undefined; - return identity.isCollection(this.contents) ? this.contents.hasIn(path) : false; - } - /** - * Sets a value in this document. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - set(key, value) { - if (this.contents == null) { - // @ts-expect-error We can't really know that this matches Contents. - this.contents = Collection.collectionFromPath(this.schema, [key], value); - } - else if (assertCollection(this.contents)) { - this.contents.set(key, value); - } - } - /** - * Sets a value in this document. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - setIn(path, value) { - if (Collection.isEmptyPath(path)) { - // @ts-expect-error We can't really know that this matches Contents. - this.contents = value; - } - else if (this.contents == null) { - // @ts-expect-error We can't really know that this matches Contents. - this.contents = Collection.collectionFromPath(this.schema, Array.from(path), value); - } - else if (assertCollection(this.contents)) { - this.contents.setIn(path, value); - } - } - /** - * Change the YAML version and schema used by the document. - * A `null` version disables support for directives, explicit tags, anchors, and aliases. - * It also requires the `schema` option to be given as a `Schema` instance value. - * - * Overrides all previously set schema options. - */ - setSchema(version, options = {}) { - if (typeof version === 'number') - version = String(version); - let opt; - switch (version) { - case '1.1': - if (this.directives) - this.directives.yaml.version = '1.1'; - else - this.directives = new directives.Directives({ version: '1.1' }); - opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' }; - break; - case '1.2': - case 'next': - if (this.directives) - this.directives.yaml.version = version; - else - this.directives = new directives.Directives({ version }); - opt = { merge: false, resolveKnownTags: true, schema: 'core' }; - break; - case null: - if (this.directives) - delete this.directives; - opt = null; - break; - default: { - const sv = JSON.stringify(version); - throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`); - } - } - // Not using `instanceof Schema` to allow for duck typing - if (options.schema instanceof Object) - this.schema = options.schema; - else if (opt) - this.schema = new Schema.Schema(Object.assign(opt, options)); - else - throw new Error(`With a null YAML version, the { schema: Schema } option is required`); - } - // json & jsonArg are only used from toJSON() - toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) { - const ctx = { - anchors: new Map(), - doc: this, - keep: !json, - mapAsMap: mapAsMap === true, - mapKeyWarned: false, - maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100 - }; - const res = toJS.toJS(this.contents, jsonArg ?? '', ctx); - if (typeof onAnchor === 'function') - for (const { count, res } of ctx.anchors.values()) - onAnchor(res, count); - return typeof reviver === 'function' - ? 
applyReviver.applyReviver(reviver, { '': res }, '', res) - : res; - } - /** - * A JSON representation of the document `contents`. - * - * @param jsonArg Used by `JSON.stringify` to indicate the array index or - * property name. - */ - toJSON(jsonArg, onAnchor) { - return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor }); - } - /** A YAML representation of the document. */ - toString(options = {}) { - if (this.errors.length > 0) - throw new Error('Document with errors cannot be stringified'); - if ('indent' in options && - (!Number.isInteger(options.indent) || Number(options.indent) <= 0)) { - const s = JSON.stringify(options.indent); - throw new Error(`"indent" option must be a positive integer, not ${s}`); - } - return stringifyDocument.stringifyDocument(this, options); - } -} -function assertCollection(contents) { - if (identity.isCollection(contents)) - return true; - throw new Error('Expected a YAML collection as document contents'); -} - -exports.Document = Document; diff --git a/bin/node_modules/yaml/dist/doc/anchors.d.ts b/bin/node_modules/yaml/dist/doc/anchors.d.ts deleted file mode 100644 index f5e967c..0000000 --- a/bin/node_modules/yaml/dist/doc/anchors.d.ts +++ /dev/null @@ -1,24 +0,0 @@ -import type { Node } from '../nodes/Node.js'; -import type { Document } from './Document.js'; -/** - * Verify that the input string is a valid anchor. - * - * Will throw on errors. - */ -export declare function anchorIsValid(anchor: string): true; -export declare function anchorNames(root: Document | Node): Set; -/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */ -export declare function findNewAnchor(prefix: string, exclude: Set): string; -export declare function createNodeAnchors(doc: Document, prefix: string): { - onAnchor: (source: unknown) => string; - /** - * With circular references, the source node is only resolved after all - * of its child nodes are. This is why anchors are set only after all of - * the nodes have been created. - */ - setAnchors: () => void; - sourceObjects: Map; -}; diff --git a/bin/node_modules/yaml/dist/doc/anchors.js b/bin/node_modules/yaml/dist/doc/anchors.js deleted file mode 100644 index 223639a..0000000 --- a/bin/node_modules/yaml/dist/doc/anchors.js +++ /dev/null @@ -1,77 +0,0 @@ -'use strict'; - -var identity = require('../nodes/identity.js'); -var visit = require('../visit.js'); - -/** - * Verify that the input string is a valid anchor. - * - * Will throw on errors. - */ -function anchorIsValid(anchor) { - if (/[\x00-\x19\s,[\]{}]/.test(anchor)) { - const sa = JSON.stringify(anchor); - const msg = `Anchor must not contain whitespace or control characters: ${sa}`; - throw new Error(msg); - } - return true; -} -function anchorNames(root) { - const anchors = new Set(); - visit.visit(root, { - Value(_key, node) { - if (node.anchor) - anchors.add(node.anchor); - } - }); - return anchors; -} -/** Find a new anchor name with the given `prefix` and a one-indexed suffix. 
*/ -function findNewAnchor(prefix, exclude) { - for (let i = 1; true; ++i) { - const name = `${prefix}${i}`; - if (!exclude.has(name)) - return name; - } -} -function createNodeAnchors(doc, prefix) { - const aliasObjects = []; - const sourceObjects = new Map(); - let prevAnchors = null; - return { - onAnchor: (source) => { - aliasObjects.push(source); - if (!prevAnchors) - prevAnchors = anchorNames(doc); - const anchor = findNewAnchor(prefix, prevAnchors); - prevAnchors.add(anchor); - return anchor; - }, - /** - * With circular references, the source node is only resolved after all - * of its child nodes are. This is why anchors are set only after all of - * the nodes have been created. - */ - setAnchors: () => { - for (const source of aliasObjects) { - const ref = sourceObjects.get(source); - if (typeof ref === 'object' && - ref.anchor && - (identity.isScalar(ref.node) || identity.isCollection(ref.node))) { - ref.node.anchor = ref.anchor; - } - else { - const error = new Error('Failed to resolve repeated object (this should not happen)'); - error.source = source; - throw error; - } - } - }, - sourceObjects - }; -} - -exports.anchorIsValid = anchorIsValid; -exports.anchorNames = anchorNames; -exports.createNodeAnchors = createNodeAnchors; -exports.findNewAnchor = findNewAnchor; diff --git a/bin/node_modules/yaml/dist/doc/applyReviver.d.ts b/bin/node_modules/yaml/dist/doc/applyReviver.d.ts deleted file mode 100644 index e125b08..0000000 --- a/bin/node_modules/yaml/dist/doc/applyReviver.d.ts +++ /dev/null @@ -1,9 +0,0 @@ -export type Reviver = (key: unknown, value: unknown) => unknown; -/** - * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec, - * in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the - * 2021 edition: https://tc39.es/ecma262/#sec-json.parse - * - * Includes extensions for handling Map and Set objects. - */ -export declare function applyReviver(reviver: Reviver, obj: unknown, key: unknown, val: any): unknown; diff --git a/bin/node_modules/yaml/dist/doc/applyReviver.js b/bin/node_modules/yaml/dist/doc/applyReviver.js deleted file mode 100644 index 8734579..0000000 --- a/bin/node_modules/yaml/dist/doc/applyReviver.js +++ /dev/null @@ -1,56 +0,0 @@ -'use strict'; - -/** - * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec, - * in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the - * 2021 edition: https://tc39.es/ecma262/#sec-json.parse - * - * Includes extensions for handling Map and Set objects. 
- */ -function applyReviver(reviver, obj, key, val) { - if (val && typeof val === 'object') { - if (Array.isArray(val)) { - for (let i = 0, len = val.length; i < len; ++i) { - const v0 = val[i]; - const v1 = applyReviver(reviver, val, String(i), v0); - if (v1 === undefined) - delete val[i]; - else if (v1 !== v0) - val[i] = v1; - } - } - else if (val instanceof Map) { - for (const k of Array.from(val.keys())) { - const v0 = val.get(k); - const v1 = applyReviver(reviver, val, k, v0); - if (v1 === undefined) - val.delete(k); - else if (v1 !== v0) - val.set(k, v1); - } - } - else if (val instanceof Set) { - for (const v0 of Array.from(val)) { - const v1 = applyReviver(reviver, val, v0, v0); - if (v1 === undefined) - val.delete(v0); - else if (v1 !== v0) { - val.delete(v0); - val.add(v1); - } - } - } - else { - for (const [k, v0] of Object.entries(val)) { - const v1 = applyReviver(reviver, val, k, v0); - if (v1 === undefined) - delete val[k]; - else if (v1 !== v0) - val[k] = v1; - } - } - } - return reviver.call(obj, key, val); -} - -exports.applyReviver = applyReviver; diff --git a/bin/node_modules/yaml/dist/doc/createNode.d.ts b/bin/node_modules/yaml/dist/doc/createNode.d.ts deleted file mode 100644 index 1619981..0000000 --- a/bin/node_modules/yaml/dist/doc/createNode.d.ts +++ /dev/null @@ -1,17 +0,0 @@ -import type { Node } from '../nodes/Node.js'; -import type { Schema } from '../schema/Schema.js'; -import type { CollectionTag, ScalarTag } from '../schema/types.js'; -import type { Replacer } from './Document.js'; -export interface CreateNodeContext { - aliasDuplicateObjects: boolean; - keepUndefined: boolean; - onAnchor: (source: unknown) => string; - onTagObj?: (tagObj: ScalarTag | CollectionTag) => void; - sourceObjects: Map; - replacer?: Replacer; - schema: Schema; -} -export declare function createNode(value: unknown, tagName: string | undefined, ctx: CreateNodeContext): Node; diff --git a/bin/node_modules/yaml/dist/doc/createNode.js b/bin/node_modules/yaml/dist/doc/createNode.js deleted file mode 100644 index 64dd31a..0000000 --- a/bin/node_modules/yaml/dist/doc/createNode.js +++ /dev/null @@ -1,91 +0,0 @@ -'use strict'; - -var Alias = require('../nodes/Alias.js'); -var identity = require('../nodes/identity.js'); -var Scalar = require('../nodes/Scalar.js'); - -const defaultTagPrefix = 'tag:yaml.org,2002:'; -function findTagObject(value, tagName, tags) { - if (tagName) { - const match = tags.filter(t => t.tag === tagName); - const tagObj = match.find(t => !t.format) ?? match[0]; - if (!tagObj) - throw new Error(`Tag ${tagName} not found`); - return tagObj; - } - return tags.find(t => t.identify?.(value) && !t.format); -} -function createNode(value, tagName, ctx) { - if (identity.isDocument(value)) - value = value.contents; - if (identity.isNode(value)) - return value; - if (identity.isPair(value)) { - const map = ctx.schema[identity.MAP].createNode?.(ctx.schema, null, ctx); - map.items.push(value); - return map; - } - if (value instanceof String || - value instanceof Number || - value instanceof Boolean || - (typeof BigInt !== 'undefined' && value instanceof BigInt) // not supported everywhere - ) { - // https://tc39.es/ecma262/#sec-serializejsonproperty - value = value.valueOf(); - } - const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx; - // Detect duplicate references to the same object & use Alias nodes for all - // after first. The `ref` wrapper allows for circular references to resolve. 
- let ref = undefined; - if (aliasDuplicateObjects && value && typeof value === 'object') { - ref = sourceObjects.get(value); - if (ref) { - if (!ref.anchor) - ref.anchor = onAnchor(value); - return new Alias.Alias(ref.anchor); - } - else { - ref = { anchor: null, node: null }; - sourceObjects.set(value, ref); - } - } - if (tagName?.startsWith('!!')) - tagName = defaultTagPrefix + tagName.slice(2); - let tagObj = findTagObject(value, tagName, schema.tags); - if (!tagObj) { - if (value && typeof value.toJSON === 'function') { - // eslint-disable-next-line @typescript-eslint/no-unsafe-call - value = value.toJSON(); - } - if (!value || typeof value !== 'object') { - const node = new Scalar.Scalar(value); - if (ref) - ref.node = node; - return node; - } - tagObj = - value instanceof Map - ? schema[identity.MAP] - : Symbol.iterator in Object(value) - ? schema[identity.SEQ] - : schema[identity.MAP]; - } - if (onTagObj) { - onTagObj(tagObj); - delete ctx.onTagObj; - } - const node = tagObj?.createNode - ? tagObj.createNode(ctx.schema, value, ctx) - : typeof tagObj?.nodeClass?.from === 'function' - ? tagObj.nodeClass.from(ctx.schema, value, ctx) - : new Scalar.Scalar(value); - if (tagName) - node.tag = tagName; - else if (!tagObj.default) - node.tag = tagObj.tag; - if (ref) - ref.node = node; - return node; -} - -exports.createNode = createNode; diff --git a/bin/node_modules/yaml/dist/doc/directives.d.ts b/bin/node_modules/yaml/dist/doc/directives.d.ts deleted file mode 100644 index ff8a2cb..0000000 --- a/bin/node_modules/yaml/dist/doc/directives.d.ts +++ /dev/null @@ -1,49 +0,0 @@ -import type { Document } from './Document.js'; -export declare class Directives { - static defaultYaml: Directives['yaml']; - static defaultTags: Directives['tags']; - yaml: { - version: '1.1' | '1.2' | 'next'; - explicit?: boolean; - }; - tags: Record; - /** - * The directives-end/doc-start marker `---`. If `null`, a marker may still be - * included in the document's stringified representation. - */ - docStart: true | null; - /** The doc-end marker `...`. */ - docEnd: boolean; - /** - * Used when parsing YAML 1.1, where: - * > If the document specifies no directives, it is parsed using the same - * > settings as the previous document. If the document does specify any - * > directives, all directives of previous documents, if any, are ignored. - */ - private atNextDocument?; - constructor(yaml?: Directives['yaml'], tags?: Directives['tags']); - clone(): Directives; - /** - * During parsing, get a Directives instance for the current document and - * update the stream state according to the current version's spec. - */ - atDocument(): Directives; - /** - * @param onError - May be called even if the action was successful - * @returns `true` on success - */ - add(line: string, onError: (offset: number, message: string, warning?: boolean) => void): boolean; - /** - * Resolves a tag, matching handles to those defined in %TAG directives. - * - * @returns Resolved tag, which may also be the non-specific tag `'!'` or a - * `'!local'` tag, or `null` if unresolvable. - */ - tagName(source: string, onError: (message: string) => void): string | null; - /** - * Given a fully resolved tag, returns its printable string form, - * taking into account current tag prefixes and defaults. 
- */ - tagString(tag: string): string; - toString(doc?: Document): string; -} diff --git a/bin/node_modules/yaml/dist/doc/directives.js b/bin/node_modules/yaml/dist/doc/directives.js deleted file mode 100644 index e13b10e..0000000 --- a/bin/node_modules/yaml/dist/doc/directives.js +++ /dev/null @@ -1,178 +0,0 @@ -'use strict'; - -var identity = require('../nodes/identity.js'); -var visit = require('../visit.js'); - -const escapeChars = { - '!': '%21', - ',': '%2C', - '[': '%5B', - ']': '%5D', - '{': '%7B', - '}': '%7D' -}; -const escapeTagName = (tn) => tn.replace(/[!,[\]{}]/g, ch => escapeChars[ch]); -class Directives { - constructor(yaml, tags) { - /** - * The directives-end/doc-start marker `---`. If `null`, a marker may still be - * included in the document's stringified representation. - */ - this.docStart = null; - /** The doc-end marker `...`. */ - this.docEnd = false; - this.yaml = Object.assign({}, Directives.defaultYaml, yaml); - this.tags = Object.assign({}, Directives.defaultTags, tags); - } - clone() { - const copy = new Directives(this.yaml, this.tags); - copy.docStart = this.docStart; - return copy; - } - /** - * During parsing, get a Directives instance for the current document and - * update the stream state according to the current version's spec. - */ - atDocument() { - const res = new Directives(this.yaml, this.tags); - switch (this.yaml.version) { - case '1.1': - this.atNextDocument = true; - break; - case '1.2': - this.atNextDocument = false; - this.yaml = { - explicit: Directives.defaultYaml.explicit, - version: '1.2' - }; - this.tags = Object.assign({}, Directives.defaultTags); - break; - } - return res; - } - /** - * @param onError - May be called even if the action was successful - * @returns `true` on success - */ - add(line, onError) { - if (this.atNextDocument) { - this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' }; - this.tags = Object.assign({}, Directives.defaultTags); - this.atNextDocument = false; - } - const parts = line.trim().split(/[ \t]+/); - const name = parts.shift(); - switch (name) { - case '%TAG': { - if (parts.length !== 2) { - onError(0, '%TAG directive should contain exactly two parts'); - if (parts.length < 2) - return false; - } - const [handle, prefix] = parts; - this.tags[handle] = prefix; - return true; - } - case '%YAML': { - this.yaml.explicit = true; - if (parts.length !== 1) { - onError(0, '%YAML directive should contain exactly one part'); - return false; - } - const [version] = parts; - if (version === '1.1' || version === '1.2') { - this.yaml.version = version; - return true; - } - else { - const isValid = /^\d+\.\d+$/.test(version); - onError(6, `Unsupported YAML version ${version}`, isValid); - return false; - } - } - default: - onError(0, `Unknown directive ${name}`, true); - return false; - } - } - /** - * Resolves a tag, matching handles to those defined in %TAG directives. - * - * @returns Resolved tag, which may also be the non-specific tag `'!'` or a - * `'!local'` tag, or `null` if unresolvable. - */ - tagName(source, onError) { - if (source === '!') - return '!'; // non-specific tag - if (source[0] !== '!') { - onError(`Not a valid tag: ${source}`); - return null; - } - if (source[1] === '<') { - const verbatim = source.slice(2, -1); - if (verbatim === '!' 
|| verbatim === '!!') { - onError(`Verbatim tags aren't resolved, so ${source} is invalid.`); - return null; - } - if (source[source.length - 1] !== '>') - onError('Verbatim tags must end with a >'); - return verbatim; - } - const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/s); - if (!suffix) - onError(`The ${source} tag has no suffix`); - const prefix = this.tags[handle]; - if (prefix) { - try { - return prefix + decodeURIComponent(suffix); - } - catch (error) { - onError(String(error)); - return null; - } - } - if (handle === '!') - return source; // local tag - onError(`Could not resolve tag: ${source}`); - return null; - } - /** - * Given a fully resolved tag, returns its printable string form, - * taking into account current tag prefixes and defaults. - */ - tagString(tag) { - for (const [handle, prefix] of Object.entries(this.tags)) { - if (tag.startsWith(prefix)) - return handle + escapeTagName(tag.substring(prefix.length)); - } - return tag[0] === '!' ? tag : `!<${tag}>`; - } - toString(doc) { - const lines = this.yaml.explicit - ? [`%YAML ${this.yaml.version || '1.2'}`] - : []; - const tagEntries = Object.entries(this.tags); - let tagNames; - if (doc && tagEntries.length > 0 && identity.isNode(doc.contents)) { - const tags = {}; - visit.visit(doc.contents, (_key, node) => { - if (identity.isNode(node) && node.tag) - tags[node.tag] = true; - }); - tagNames = Object.keys(tags); - } - else - tagNames = []; - for (const [handle, prefix] of tagEntries) { - if (handle === '!!' && prefix === 'tag:yaml.org,2002:') - continue; - if (!doc || tagNames.some(tn => tn.startsWith(prefix))) - lines.push(`%TAG ${handle} ${prefix}`); - } - return lines.join('\n'); - } -} -Directives.defaultYaml = { explicit: false, version: '1.2' }; -Directives.defaultTags = { '!!': 'tag:yaml.org,2002:' }; - -exports.Directives = Directives; diff --git a/bin/node_modules/yaml/dist/errors.d.ts b/bin/node_modules/yaml/dist/errors.d.ts deleted file mode 100644 index 1b7b612..0000000 --- a/bin/node_modules/yaml/dist/errors.d.ts +++ /dev/null @@ -1,21 +0,0 @@ -import type { LineCounter } from './parse/line-counter'; -export type ErrorCode = 'ALIAS_PROPS' | 'BAD_ALIAS' | 'BAD_DIRECTIVE' | 'BAD_DQ_ESCAPE' | 'BAD_INDENT' | 'BAD_PROP_ORDER' | 'BAD_SCALAR_START' | 'BLOCK_AS_IMPLICIT_KEY' | 'BLOCK_IN_FLOW' | 'DUPLICATE_KEY' | 'IMPOSSIBLE' | 'KEY_OVER_1024_CHARS' | 'MISSING_CHAR' | 'MULTILINE_IMPLICIT_KEY' | 'MULTIPLE_ANCHORS' | 'MULTIPLE_DOCS' | 'MULTIPLE_TAGS' | 'TAB_AS_INDENT' | 'TAG_RESOLVE_FAILED' | 'UNEXPECTED_TOKEN' | 'BAD_COLLECTION_TYPE'; -export type LinePos = { - line: number; - col: number; -}; -export declare class YAMLError extends Error { - name: 'YAMLParseError' | 'YAMLWarning'; - code: ErrorCode; - message: string; - pos: [number, number]; - linePos?: [LinePos] | [LinePos, LinePos]; - constructor(name: YAMLError['name'], pos: [number, number], code: ErrorCode, message: string); -} -export declare class YAMLParseError extends YAMLError { - constructor(pos: [number, number], code: ErrorCode, message: string); -} -export declare class YAMLWarning extends YAMLError { - constructor(pos: [number, number], code: ErrorCode, message: string); -} -export declare const prettifyError: (src: string, lc: LineCounter) => (error: YAMLError) => void; diff --git a/bin/node_modules/yaml/dist/errors.js b/bin/node_modules/yaml/dist/errors.js deleted file mode 100644 index 9d04c60..0000000 --- a/bin/node_modules/yaml/dist/errors.js +++ /dev/null @@ -1,62 +0,0 @@ -'use strict'; - -class YAMLError extends Error { - 
constructor(name, pos, code, message) { - super(); - this.name = name; - this.code = code; - this.message = message; - this.pos = pos; - } -} -class YAMLParseError extends YAMLError { - constructor(pos, code, message) { - super('YAMLParseError', pos, code, message); - } -} -class YAMLWarning extends YAMLError { - constructor(pos, code, message) { - super('YAMLWarning', pos, code, message); - } -} -const prettifyError = (src, lc) => (error) => { - if (error.pos[0] === -1) - return; - error.linePos = error.pos.map(pos => lc.linePos(pos)); - const { line, col } = error.linePos[0]; - error.message += ` at line ${line}, column ${col}`; - let ci = col - 1; - let lineStr = src - .substring(lc.lineStarts[line - 1], lc.lineStarts[line]) - .replace(/[\n\r]+$/, ''); - // Trim to max 80 chars, keeping col position near the middle - if (ci >= 60 && lineStr.length > 80) { - const trimStart = Math.min(ci - 39, lineStr.length - 79); - lineStr = '…' + lineStr.substring(trimStart); - ci -= trimStart - 1; - } - if (lineStr.length > 80) - lineStr = lineStr.substring(0, 79) + '…'; - // Include previous line in context if pointing at line start - if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) { - // Regexp won't match if start is trimmed - let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]); - if (prev.length > 80) - prev = prev.substring(0, 79) + '…\n'; - lineStr = prev + lineStr; - } - if (/[^ ]/.test(lineStr)) { - let count = 1; - const end = error.linePos[1]; - if (end && end.line === line && end.col > col) { - count = Math.max(1, Math.min(end.col - col, 80 - ci)); - } - const pointer = ' '.repeat(ci) + '^'.repeat(count); - error.message += `:\n\n${lineStr}\n${pointer}\n`; - } -}; - -exports.YAMLError = YAMLError; -exports.YAMLParseError = YAMLParseError; -exports.YAMLWarning = YAMLWarning; -exports.prettifyError = prettifyError; diff --git a/bin/node_modules/yaml/dist/index.d.ts b/bin/node_modules/yaml/dist/index.d.ts deleted file mode 100644 index 66e784e..0000000 --- a/bin/node_modules/yaml/dist/index.d.ts +++ /dev/null @@ -1,22 +0,0 @@ -export { Composer } from './compose/composer.js'; -export { Document } from './doc/Document.js'; -export { Schema } from './schema/Schema.js'; -export { ErrorCode, YAMLError, YAMLParseError, YAMLWarning } from './errors.js'; -export { Alias } from './nodes/Alias.js'; -export { isAlias, isCollection, isDocument, isMap, isNode, isPair, isScalar, isSeq } from './nodes/identity.js'; -export { Node, ParsedNode, Range } from './nodes/Node.js'; -export { Pair } from './nodes/Pair.js'; -export { Scalar } from './nodes/Scalar.js'; -export { YAMLMap } from './nodes/YAMLMap.js'; -export { YAMLSeq } from './nodes/YAMLSeq.js'; -export type { CreateNodeOptions, DocumentOptions, ParseOptions, SchemaOptions, ToJSOptions, ToStringOptions } from './options.js'; -export * as CST from './parse/cst.js'; -export { Lexer } from './parse/lexer.js'; -export { LineCounter } from './parse/line-counter.js'; -export { Parser } from './parse/parser.js'; -export { EmptyStream, parse, parseAllDocuments, parseDocument, stringify } from './public-api.js'; -export type { TagId, Tags } from './schema/tags'; -export type { CollectionTag, ScalarTag } from './schema/types'; -export type { YAMLOMap } from './schema/yaml-1.1/omap'; -export type { YAMLSet } from './schema/yaml-1.1/set'; -export { asyncVisitor, asyncVisitorFn, visit, visitAsync, visitor, visitorFn } from './visit.js'; diff --git a/bin/node_modules/yaml/dist/index.js b/bin/node_modules/yaml/dist/index.js deleted file 
mode 100644 index 18c0cb6..0000000 --- a/bin/node_modules/yaml/dist/index.js +++ /dev/null @@ -1,50 +0,0 @@ -'use strict'; - -var composer = require('./compose/composer.js'); -var Document = require('./doc/Document.js'); -var Schema = require('./schema/Schema.js'); -var errors = require('./errors.js'); -var Alias = require('./nodes/Alias.js'); -var identity = require('./nodes/identity.js'); -var Pair = require('./nodes/Pair.js'); -var Scalar = require('./nodes/Scalar.js'); -var YAMLMap = require('./nodes/YAMLMap.js'); -var YAMLSeq = require('./nodes/YAMLSeq.js'); -var cst = require('./parse/cst.js'); -var lexer = require('./parse/lexer.js'); -var lineCounter = require('./parse/line-counter.js'); -var parser = require('./parse/parser.js'); -var publicApi = require('./public-api.js'); -var visit = require('./visit.js'); - - - -exports.Composer = composer.Composer; -exports.Document = Document.Document; -exports.Schema = Schema.Schema; -exports.YAMLError = errors.YAMLError; -exports.YAMLParseError = errors.YAMLParseError; -exports.YAMLWarning = errors.YAMLWarning; -exports.Alias = Alias.Alias; -exports.isAlias = identity.isAlias; -exports.isCollection = identity.isCollection; -exports.isDocument = identity.isDocument; -exports.isMap = identity.isMap; -exports.isNode = identity.isNode; -exports.isPair = identity.isPair; -exports.isScalar = identity.isScalar; -exports.isSeq = identity.isSeq; -exports.Pair = Pair.Pair; -exports.Scalar = Scalar.Scalar; -exports.YAMLMap = YAMLMap.YAMLMap; -exports.YAMLSeq = YAMLSeq.YAMLSeq; -exports.CST = cst; -exports.Lexer = lexer.Lexer; -exports.LineCounter = lineCounter.LineCounter; -exports.Parser = parser.Parser; -exports.parse = publicApi.parse; -exports.parseAllDocuments = publicApi.parseAllDocuments; -exports.parseDocument = publicApi.parseDocument; -exports.stringify = publicApi.stringify; -exports.visit = visit.visit; -exports.visitAsync = visit.visitAsync; diff --git a/bin/node_modules/yaml/dist/log.d.ts b/bin/node_modules/yaml/dist/log.d.ts deleted file mode 100644 index 5e21612..0000000 --- a/bin/node_modules/yaml/dist/log.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -export type LogLevelId = 'silent' | 'error' | 'warn' | 'debug'; -export declare function debug(logLevel: LogLevelId, ...messages: any[]): void; -export declare function warn(logLevel: LogLevelId, warning: string | Error): void; diff --git a/bin/node_modules/yaml/dist/log.js b/bin/node_modules/yaml/dist/log.js deleted file mode 100644 index afb6895..0000000 --- a/bin/node_modules/yaml/dist/log.js +++ /dev/null @@ -1,19 +0,0 @@ -'use strict'; - -function debug(logLevel, ...messages) { - if (logLevel === 'debug') - console.log(...messages); -} -function warn(logLevel, warning) { - if (logLevel === 'debug' || logLevel === 'warn') { - // https://github.com/typescript-eslint/typescript-eslint/issues/7478 - // eslint-disable-next-line @typescript-eslint/prefer-optional-chain - if (typeof process !== 'undefined' && process.emitWarning) - process.emitWarning(warning); - else - console.warn(warning); - } -} - -exports.debug = debug; -exports.warn = warn; diff --git a/bin/node_modules/yaml/dist/nodes/Alias.d.ts b/bin/node_modules/yaml/dist/nodes/Alias.d.ts deleted file mode 100644 index b55ebae..0000000 --- a/bin/node_modules/yaml/dist/nodes/Alias.d.ts +++ /dev/null @@ -1,28 +0,0 @@ -import type { Document } from '../doc/Document.js'; -import type { FlowScalar } from '../parse/cst.js'; -import type { StringifyContext } from '../stringify/stringify.js'; -import { NodeBase, Range } from './Node.js'; -import 
type { Scalar } from './Scalar'; -import { ToJSContext } from './toJS.js'; -import type { YAMLMap } from './YAMLMap.js'; -import type { YAMLSeq } from './YAMLSeq.js'; -export declare namespace Alias { - interface Parsed extends Alias { - range: Range; - srcToken?: FlowScalar & { - type: 'alias'; - }; - } -} -export declare class Alias extends NodeBase { - source: string; - anchor?: never; - constructor(source: string); - /** - * Resolve the value of this alias within `doc`, finding the last - * instance of the `source` anchor before this node. - */ - resolve(doc: Document): Scalar | YAMLMap | YAMLSeq | undefined; - toJSON(_arg?: unknown, ctx?: ToJSContext): {} | null; - toString(ctx?: StringifyContext, _onComment?: () => void, _onChompKeep?: () => void): string; -} diff --git a/bin/node_modules/yaml/dist/nodes/Alias.js b/bin/node_modules/yaml/dist/nodes/Alias.js deleted file mode 100644 index 46b37d9..0000000 --- a/bin/node_modules/yaml/dist/nodes/Alias.js +++ /dev/null @@ -1,103 +0,0 @@ -'use strict'; - -var anchors = require('../doc/anchors.js'); -var visit = require('../visit.js'); -var identity = require('./identity.js'); -var Node = require('./Node.js'); -var toJS = require('./toJS.js'); - -class Alias extends Node.NodeBase { - constructor(source) { - super(identity.ALIAS); - this.source = source; - Object.defineProperty(this, 'tag', { - set() { - throw new Error('Alias nodes cannot have tags'); - } - }); - } - /** - * Resolve the value of this alias within `doc`, finding the last - * instance of the `source` anchor before this node. - */ - resolve(doc) { - let found = undefined; - visit.visit(doc, { - Node: (_key, node) => { - if (node === this) - return visit.visit.BREAK; - if (node.anchor === this.source) - found = node; - } - }); - return found; - } - toJSON(_arg, ctx) { - if (!ctx) - return { source: this.source }; - const { anchors, doc, maxAliasCount } = ctx; - const source = this.resolve(doc); - if (!source) { - const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`; - throw new ReferenceError(msg); - } - let data = anchors.get(source); - if (!data) { - // Resolve anchors for Node.prototype.toJS() - toJS.toJS(source, null, ctx); - data = anchors.get(source); - } - /* istanbul ignore if */ - if (!data || data.res === undefined) { - const msg = 'This should not happen: Alias anchor was not resolved?'; - throw new ReferenceError(msg); - } - if (maxAliasCount >= 0) { - data.count += 1; - if (data.aliasCount === 0) - data.aliasCount = getAliasCount(doc, source, anchors); - if (data.count * data.aliasCount > maxAliasCount) { - const msg = 'Excessive alias count indicates a resource exhaustion attack'; - throw new ReferenceError(msg); - } - } - return data.res; - } - toString(ctx, _onComment, _onChompKeep) { - const src = `*${this.source}`; - if (ctx) { - anchors.anchorIsValid(this.source); - if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) { - const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`; - throw new Error(msg); - } - if (ctx.implicitKey) - return `${src} `; - } - return src; - } -} -function getAliasCount(doc, node, anchors) { - if (identity.isAlias(node)) { - const source = node.resolve(doc); - const anchor = anchors && source && anchors.get(source); - return anchor ? 
anchor.count * anchor.aliasCount : 0; - } - else if (identity.isCollection(node)) { - let count = 0; - for (const item of node.items) { - const c = getAliasCount(doc, item, anchors); - if (c > count) - count = c; - } - return count; - } - else if (identity.isPair(node)) { - const kc = getAliasCount(doc, node.key, anchors); - const vc = getAliasCount(doc, node.value, anchors); - return Math.max(kc, vc); - } - return 1; -} - -exports.Alias = Alias; diff --git a/bin/node_modules/yaml/dist/nodes/Collection.d.ts b/bin/node_modules/yaml/dist/nodes/Collection.d.ts deleted file mode 100644 index 981ca11..0000000 --- a/bin/node_modules/yaml/dist/nodes/Collection.d.ts +++ /dev/null @@ -1,74 +0,0 @@ -import type { Schema } from '../schema/Schema.js'; -import { NODE_TYPE } from './identity.js'; -import { NodeBase } from './Node.js'; -export declare function collectionFromPath(schema: Schema, path: unknown[], value: unknown): import("./Node.js").Node; -export declare const isEmptyPath: (path: Iterable | null | undefined) => path is null | undefined; -export declare abstract class Collection extends NodeBase { - static maxFlowStringSingleLineLength: number; - schema: Schema | undefined; - [NODE_TYPE]: symbol; - items: unknown[]; - /** An optional anchor on this node. Used by alias nodes. */ - anchor?: string; - /** - * If true, stringify this and all child nodes using flow rather than - * block styles. - */ - flow?: boolean; - constructor(type: symbol, schema?: Schema); - /** - * Create a copy of this collection. - * - * @param schema - If defined, overwrites the original's schema - */ - clone(schema?: Schema): Collection; - /** Adds a value to the collection. */ - abstract add(value: unknown): void; - /** - * Removes a value from the collection. - * @returns `true` if the item was found and removed. - */ - abstract delete(key: unknown): boolean; - /** - * Returns item at `key`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - abstract get(key: unknown, keepScalar?: boolean): unknown; - /** - * Checks if the collection includes a value with the key `key`. - */ - abstract has(key: unknown): boolean; - /** - * Sets a value in this collection. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - abstract set(key: unknown, value: unknown): void; - /** - * Adds a value to the collection. For `!!map` and `!!omap` the value must - * be a Pair instance or a `{ key, value }` object, which may not have a key - * that already exists in the map. - */ - addIn(path: Iterable, value: unknown): void; - /** - * Removes a value from the collection. - * @returns `true` if the item was found and removed. - */ - deleteIn(path: Iterable): boolean; - /** - * Returns item at `key`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - getIn(path: Iterable, keepScalar?: boolean): unknown; - hasAllNullValues(allowScalar?: boolean): boolean; - /** - * Checks if the collection includes a value with the key `key`. - */ - hasIn(path: Iterable): boolean; - /** - * Sets a value in this collection. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. 
- */ - setIn(path: Iterable, value: unknown): void; -} diff --git a/bin/node_modules/yaml/dist/nodes/Collection.js b/bin/node_modules/yaml/dist/nodes/Collection.js deleted file mode 100644 index 1c03294..0000000 --- a/bin/node_modules/yaml/dist/nodes/Collection.js +++ /dev/null @@ -1,152 +0,0 @@ -'use strict'; - -var createNode = require('../doc/createNode.js'); -var identity = require('./identity.js'); -var Node = require('./Node.js'); - -function collectionFromPath(schema, path, value) { - let v = value; - for (let i = path.length - 1; i >= 0; --i) { - const k = path[i]; - if (typeof k === 'number' && Number.isInteger(k) && k >= 0) { - const a = []; - a[k] = v; - v = a; - } - else { - v = new Map([[k, v]]); - } - } - return createNode.createNode(v, undefined, { - aliasDuplicateObjects: false, - keepUndefined: false, - onAnchor: () => { - throw new Error('This should not happen, please report a bug.'); - }, - schema, - sourceObjects: new Map() - }); -} -// Type guard is intentionally a little wrong so as to be more useful, -// as it does not cover untypable empty non-string iterables (e.g. []). -const isEmptyPath = (path) => path == null || - (typeof path === 'object' && !!path[Symbol.iterator]().next().done); -class Collection extends Node.NodeBase { - constructor(type, schema) { - super(type); - Object.defineProperty(this, 'schema', { - value: schema, - configurable: true, - enumerable: false, - writable: true - }); - } - /** - * Create a copy of this collection. - * - * @param schema - If defined, overwrites the original's schema - */ - clone(schema) { - const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this)); - if (schema) - copy.schema = schema; - copy.items = copy.items.map(it => identity.isNode(it) || identity.isPair(it) ? it.clone(schema) : it); - if (this.range) - copy.range = this.range.slice(); - return copy; - } - /** - * Adds a value to the collection. For `!!map` and `!!omap` the value must - * be a Pair instance or a `{ key, value }` object, which may not have a key - * that already exists in the map. - */ - addIn(path, value) { - if (isEmptyPath(path)) - this.add(value); - else { - const [key, ...rest] = path; - const node = this.get(key, true); - if (identity.isCollection(node)) - node.addIn(rest, value); - else if (node === undefined && this.schema) - this.set(key, collectionFromPath(this.schema, rest, value)); - else - throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); - } - } - /** - * Removes a value from the collection. - * @returns `true` if the item was found and removed. - */ - deleteIn(path) { - const [key, ...rest] = path; - if (rest.length === 0) - return this.delete(key); - const node = this.get(key, true); - if (identity.isCollection(node)) - return node.deleteIn(rest); - else - throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); - } - /** - * Returns item at `key`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - */ - getIn(path, keepScalar) { - const [key, ...rest] = path; - const node = this.get(key, true); - if (rest.length === 0) - return !keepScalar && identity.isScalar(node) ? node.value : node; - else - return identity.isCollection(node) ? 
node.getIn(rest, keepScalar) : undefined; - } - hasAllNullValues(allowScalar) { - return this.items.every(node => { - if (!identity.isPair(node)) - return false; - const n = node.value; - return (n == null || - (allowScalar && - identity.isScalar(n) && - n.value == null && - !n.commentBefore && - !n.comment && - !n.tag)); - }); - } - /** - * Checks if the collection includes a value with the key `key`. - */ - hasIn(path) { - const [key, ...rest] = path; - if (rest.length === 0) - return this.has(key); - const node = this.get(key, true); - return identity.isCollection(node) ? node.hasIn(rest) : false; - } - /** - * Sets a value in this collection. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - */ - setIn(path, value) { - const [key, ...rest] = path; - if (rest.length === 0) { - this.set(key, value); - } - else { - const node = this.get(key, true); - if (identity.isCollection(node)) - node.setIn(rest, value); - else if (node === undefined && this.schema) - this.set(key, collectionFromPath(this.schema, rest, value)); - else - throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); - } - } -} -Collection.maxFlowStringSingleLineLength = 60; - -exports.Collection = Collection; -exports.collectionFromPath = collectionFromPath; -exports.isEmptyPath = isEmptyPath; diff --git a/bin/node_modules/yaml/dist/nodes/Node.d.ts b/bin/node_modules/yaml/dist/nodes/Node.d.ts deleted file mode 100644 index fe91ba6..0000000 --- a/bin/node_modules/yaml/dist/nodes/Node.d.ts +++ /dev/null @@ -1,46 +0,0 @@ -import type { Document } from '../doc/Document.js'; -import type { ToJSOptions } from '../options.js'; -import { Token } from '../parse/cst.js'; -import type { StringifyContext } from '../stringify/stringify.js'; -import type { Alias } from './Alias.js'; -import { NODE_TYPE } from './identity.js'; -import type { Scalar } from './Scalar.js'; -import type { YAMLMap } from './YAMLMap.js'; -import type { YAMLSeq } from './YAMLSeq.js'; -export type Node = Alias | Scalar | YAMLMap | YAMLSeq; -/** Utility type mapper */ -export type NodeType = T extends string | number | bigint | boolean | null | undefined ? Scalar : T extends Date ? Scalar : T extends Array ? YAMLSeq> : T extends { - [key: string]: any; -} ? YAMLMap, NodeType> : T extends { - [key: number]: any; -} ? YAMLMap, NodeType> : Node; -export type ParsedNode = Alias.Parsed | Scalar.Parsed | YAMLMap.Parsed | YAMLSeq.Parsed; -export type Range = [number, number, number]; -export declare abstract class NodeBase { - readonly [NODE_TYPE]: symbol; - /** A comment on or immediately after this */ - comment?: string | null; - /** A comment before this */ - commentBefore?: string | null; - /** - * The `[start, value-end, node-end]` character offsets for the part of the - * source parsed into this node (undefined if not parsed). The `value-end` - * and `node-end` positions are themselves not included in their respective - * ranges. - */ - range?: Range | null; - /** A blank line before this node and its commentBefore */ - spaceBefore?: boolean; - /** The CST token that was composed into this node. */ - srcToken?: Token; - /** A fully qualified tag, if required */ - tag?: string; - /** A plain JS representation of this node */ - abstract toJSON(): any; - abstract toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string; - constructor(type: symbol); - /** Create a copy of this node. */ - clone(): NodeBase; - /** A plain JavaScript representation of this node. 
*/ - toJS(doc: Document, { mapAsMap, maxAliasCount, onAnchor, reviver }?: ToJSOptions): any; -} diff --git a/bin/node_modules/yaml/dist/nodes/Node.js b/bin/node_modules/yaml/dist/nodes/Node.js deleted file mode 100644 index d384e1c..0000000 --- a/bin/node_modules/yaml/dist/nodes/Node.js +++ /dev/null @@ -1,40 +0,0 @@ -'use strict'; - -var applyReviver = require('../doc/applyReviver.js'); -var identity = require('./identity.js'); -var toJS = require('./toJS.js'); - -class NodeBase { - constructor(type) { - Object.defineProperty(this, identity.NODE_TYPE, { value: type }); - } - /** Create a copy of this node. */ - clone() { - const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this)); - if (this.range) - copy.range = this.range.slice(); - return copy; - } - /** A plain JavaScript representation of this node. */ - toJS(doc, { mapAsMap, maxAliasCount, onAnchor, reviver } = {}) { - if (!identity.isDocument(doc)) - throw new TypeError('A document argument is required'); - const ctx = { - anchors: new Map(), - doc, - keep: true, - mapAsMap: mapAsMap === true, - mapKeyWarned: false, - maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100 - }; - const res = toJS.toJS(this, '', ctx); - if (typeof onAnchor === 'function') - for (const { count, res } of ctx.anchors.values()) - onAnchor(res, count); - return typeof reviver === 'function' - ? applyReviver.applyReviver(reviver, { '': res }, '', res) - : res; - } -} - -exports.NodeBase = NodeBase; diff --git a/bin/node_modules/yaml/dist/nodes/Pair.d.ts b/bin/node_modules/yaml/dist/nodes/Pair.d.ts deleted file mode 100644 index 6178d3a..0000000 --- a/bin/node_modules/yaml/dist/nodes/Pair.d.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { CreateNodeContext } from '../doc/createNode.js'; -import type { CollectionItem } from '../parse/cst.js'; -import type { Schema } from '../schema/Schema.js'; -import type { StringifyContext } from '../stringify/stringify.js'; -import { addPairToJSMap } from './addPairToJSMap.js'; -import { NODE_TYPE } from './identity.js'; -import type { ToJSContext } from './toJS.js'; -export declare function createPair(key: unknown, value: unknown, ctx: CreateNodeContext): Pair | import("./YAMLMap.js").YAMLMap | import("./YAMLSeq.js").YAMLSeq>; -export declare class Pair { - readonly [NODE_TYPE]: symbol; - /** Always Node or null when parsed, but can be set to anything. */ - key: K; - /** Always Node or null when parsed, but can be set to anything. */ - value: V | null; - /** The CST token that was composed into this pair. 
*/ - srcToken?: CollectionItem; - constructor(key: K, value?: V | null); - clone(schema?: Schema): Pair; - toJSON(_?: unknown, ctx?: ToJSContext): ReturnType; - toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string; -} diff --git a/bin/node_modules/yaml/dist/nodes/Pair.js b/bin/node_modules/yaml/dist/nodes/Pair.js deleted file mode 100644 index ae4c772..0000000 --- a/bin/node_modules/yaml/dist/nodes/Pair.js +++ /dev/null @@ -1,39 +0,0 @@ -'use strict'; - -var createNode = require('../doc/createNode.js'); -var stringifyPair = require('../stringify/stringifyPair.js'); -var addPairToJSMap = require('./addPairToJSMap.js'); -var identity = require('./identity.js'); - -function createPair(key, value, ctx) { - const k = createNode.createNode(key, undefined, ctx); - const v = createNode.createNode(value, undefined, ctx); - return new Pair(k, v); -} -class Pair { - constructor(key, value = null) { - Object.defineProperty(this, identity.NODE_TYPE, { value: identity.PAIR }); - this.key = key; - this.value = value; - } - clone(schema) { - let { key, value } = this; - if (identity.isNode(key)) - key = key.clone(schema); - if (identity.isNode(value)) - value = value.clone(schema); - return new Pair(key, value); - } - toJSON(_, ctx) { - const pair = ctx?.mapAsMap ? new Map() : {}; - return addPairToJSMap.addPairToJSMap(ctx, pair, this); - } - toString(ctx, onComment, onChompKeep) { - return ctx?.doc - ? stringifyPair.stringifyPair(this, ctx, onComment, onChompKeep) - : JSON.stringify(this); - } -} - -exports.Pair = Pair; -exports.createPair = createPair; diff --git a/bin/node_modules/yaml/dist/nodes/Scalar.d.ts b/bin/node_modules/yaml/dist/nodes/Scalar.d.ts deleted file mode 100644 index dd330b2..0000000 --- a/bin/node_modules/yaml/dist/nodes/Scalar.d.ts +++ /dev/null @@ -1,42 +0,0 @@ -import type { BlockScalar, FlowScalar } from '../parse/cst.js'; -import { NodeBase, Range } from './Node.js'; -import { ToJSContext } from './toJS.js'; -export declare const isScalarValue: (value: unknown) => boolean; -export declare namespace Scalar { - interface Parsed extends Scalar { - range: Range; - source: string; - srcToken?: FlowScalar | BlockScalar; - } - type BLOCK_FOLDED = 'BLOCK_FOLDED'; - type BLOCK_LITERAL = 'BLOCK_LITERAL'; - type PLAIN = 'PLAIN'; - type QUOTE_DOUBLE = 'QUOTE_DOUBLE'; - type QUOTE_SINGLE = 'QUOTE_SINGLE'; - type Type = BLOCK_FOLDED | BLOCK_LITERAL | PLAIN | QUOTE_DOUBLE | QUOTE_SINGLE; -} -export declare class Scalar extends NodeBase { - static readonly BLOCK_FOLDED = "BLOCK_FOLDED"; - static readonly BLOCK_LITERAL = "BLOCK_LITERAL"; - static readonly PLAIN = "PLAIN"; - static readonly QUOTE_DOUBLE = "QUOTE_DOUBLE"; - static readonly QUOTE_SINGLE = "QUOTE_SINGLE"; - value: T; - /** An optional anchor on this node. Used by alias nodes. */ - anchor?: string; - /** - * By default (undefined), numbers use decimal notation. - * The YAML 1.2 core schema only supports 'HEX' and 'OCT'. - * The YAML 1.1 schema also supports 'BIN' and 'TIME' - */ - format?: string; - /** If `value` is a number, use this value when stringifying this node. 
*/ - minFractionDigits?: number; - /** Set during parsing to the source string value */ - source?: string; - /** The scalar style used for the node's string representation */ - type?: Scalar.Type; - constructor(value: T); - toJSON(arg?: any, ctx?: ToJSContext): any; - toString(): string; -} diff --git a/bin/node_modules/yaml/dist/nodes/Scalar.js b/bin/node_modules/yaml/dist/nodes/Scalar.js deleted file mode 100644 index bd7d4d2..0000000 --- a/bin/node_modules/yaml/dist/nodes/Scalar.js +++ /dev/null @@ -1,27 +0,0 @@ -'use strict'; - -var identity = require('./identity.js'); -var Node = require('./Node.js'); -var toJS = require('./toJS.js'); - -const isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object'); -class Scalar extends Node.NodeBase { - constructor(value) { - super(identity.SCALAR); - this.value = value; - } - toJSON(arg, ctx) { - return ctx?.keep ? this.value : toJS.toJS(this.value, arg, ctx); - } - toString() { - return String(this.value); - } -} -Scalar.BLOCK_FOLDED = 'BLOCK_FOLDED'; -Scalar.BLOCK_LITERAL = 'BLOCK_LITERAL'; -Scalar.PLAIN = 'PLAIN'; -Scalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE'; -Scalar.QUOTE_SINGLE = 'QUOTE_SINGLE'; - -exports.Scalar = Scalar; -exports.isScalarValue = isScalarValue; diff --git a/bin/node_modules/yaml/dist/nodes/YAMLMap.d.ts b/bin/node_modules/yaml/dist/nodes/YAMLMap.d.ts deleted file mode 100644 index cef75f0..0000000 --- a/bin/node_modules/yaml/dist/nodes/YAMLMap.d.ts +++ /dev/null @@ -1,53 +0,0 @@ -import type { BlockMap, FlowCollection } from '../parse/cst.js'; -import type { Schema } from '../schema/Schema.js'; -import type { StringifyContext } from '../stringify/stringify.js'; -import { CreateNodeContext } from '../util.js'; -import { Collection } from './Collection.js'; -import type { ParsedNode, Range } from './Node.js'; -import { Pair } from './Pair.js'; -import { Scalar } from './Scalar.js'; -import type { ToJSContext } from './toJS.js'; -export type MapLike = Map | Set | Record; -export declare function findPair(items: Iterable>, key: unknown): Pair | undefined; -export declare namespace YAMLMap { - interface Parsed extends YAMLMap { - items: Pair[]; - range: Range; - srcToken?: BlockMap | FlowCollection; - } -} -export declare class YAMLMap extends Collection { - static get tagName(): 'tag:yaml.org,2002:map'; - items: Pair[]; - constructor(schema?: Schema); - /** - * A generic collection parsing method that can be extended - * to other node classes that inherit from YAMLMap - */ - static from(schema: Schema, obj: unknown, ctx: CreateNodeContext): YAMLMap; - /** - * Adds a value to the collection. - * - * @param overwrite - If not set `true`, using a key that is already in the - * collection will throw. Otherwise, overwrites the previous value. 
- */ - add(pair: Pair | { - key: K; - value: V; - }, overwrite?: boolean): void; - delete(key: unknown): boolean; - get(key: unknown, keepScalar: true): Scalar | undefined; - get(key: unknown, keepScalar?: false): V | undefined; - get(key: unknown, keepScalar?: boolean): V | Scalar | undefined; - has(key: unknown): boolean; - set(key: K, value: V): void; - /** - * @param ctx - Conversion context, originally set in Document#toJS() - * @param {Class} Type - If set, forces the returned collection type - * @returns Instance of Type, Map, or Object - */ - toJSON>(_?: unknown, ctx?: ToJSContext, Type?: { - new (): T; - }): any; - toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string; -} diff --git a/bin/node_modules/yaml/dist/nodes/YAMLMap.js b/bin/node_modules/yaml/dist/nodes/YAMLMap.js deleted file mode 100644 index 210abbf..0000000 --- a/bin/node_modules/yaml/dist/nodes/YAMLMap.js +++ /dev/null @@ -1,147 +0,0 @@ -'use strict'; - -var stringifyCollection = require('../stringify/stringifyCollection.js'); -var addPairToJSMap = require('./addPairToJSMap.js'); -var Collection = require('./Collection.js'); -var identity = require('./identity.js'); -var Pair = require('./Pair.js'); -var Scalar = require('./Scalar.js'); - -function findPair(items, key) { - const k = identity.isScalar(key) ? key.value : key; - for (const it of items) { - if (identity.isPair(it)) { - if (it.key === key || it.key === k) - return it; - if (identity.isScalar(it.key) && it.key.value === k) - return it; - } - } - return undefined; -} -class YAMLMap extends Collection.Collection { - static get tagName() { - return 'tag:yaml.org,2002:map'; - } - constructor(schema) { - super(identity.MAP, schema); - this.items = []; - } - /** - * A generic collection parsing method that can be extended - * to other node classes that inherit from YAMLMap - */ - static from(schema, obj, ctx) { - const { keepUndefined, replacer } = ctx; - const map = new this(schema); - const add = (key, value) => { - if (typeof replacer === 'function') - value = replacer.call(obj, key, value); - else if (Array.isArray(replacer) && !replacer.includes(key)) - return; - if (value !== undefined || keepUndefined) - map.items.push(Pair.createPair(key, value, ctx)); - }; - if (obj instanceof Map) { - for (const [key, value] of obj) - add(key, value); - } - else if (obj && typeof obj === 'object') { - for (const key of Object.keys(obj)) - add(key, obj[key]); - } - if (typeof schema.sortMapEntries === 'function') { - map.items.sort(schema.sortMapEntries); - } - return map; - } - /** - * Adds a value to the collection. - * - * @param overwrite - If not set `true`, using a key that is already in the - * collection will throw. Otherwise, overwrites the previous value. - */ - add(pair, overwrite) { - let _pair; - if (identity.isPair(pair)) - _pair = pair; - else if (!pair || typeof pair !== 'object' || !('key' in pair)) { - // In TypeScript, this never happens. 
- _pair = new Pair.Pair(pair, pair?.value); - } - else - _pair = new Pair.Pair(pair.key, pair.value); - const prev = findPair(this.items, _pair.key); - const sortEntries = this.schema?.sortMapEntries; - if (prev) { - if (!overwrite) - throw new Error(`Key ${_pair.key} already set`); - // For scalars, keep the old node & its comments and anchors - if (identity.isScalar(prev.value) && Scalar.isScalarValue(_pair.value)) - prev.value.value = _pair.value; - else - prev.value = _pair.value; - } - else if (sortEntries) { - const i = this.items.findIndex(item => sortEntries(_pair, item) < 0); - if (i === -1) - this.items.push(_pair); - else - this.items.splice(i, 0, _pair); - } - else { - this.items.push(_pair); - } - } - delete(key) { - const it = findPair(this.items, key); - if (!it) - return false; - const del = this.items.splice(this.items.indexOf(it), 1); - return del.length > 0; - } - get(key, keepScalar) { - const it = findPair(this.items, key); - const node = it?.value; - return (!keepScalar && identity.isScalar(node) ? node.value : node) ?? undefined; - } - has(key) { - return !!findPair(this.items, key); - } - set(key, value) { - this.add(new Pair.Pair(key, value), true); - } - /** - * @param ctx - Conversion context, originally set in Document#toJS() - * @param {Class} Type - If set, forces the returned collection type - * @returns Instance of Type, Map, or Object - */ - toJSON(_, ctx, Type) { - const map = Type ? new Type() : ctx?.mapAsMap ? new Map() : {}; - if (ctx?.onCreate) - ctx.onCreate(map); - for (const item of this.items) - addPairToJSMap.addPairToJSMap(ctx, map, item); - return map; - } - toString(ctx, onComment, onChompKeep) { - if (!ctx) - return JSON.stringify(this); - for (const item of this.items) { - if (!identity.isPair(item)) - throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`); - } - if (!ctx.allNullValues && this.hasAllNullValues(false)) - ctx = Object.assign({}, ctx, { allNullValues: true }); - return stringifyCollection.stringifyCollection(this, ctx, { - blockItemPrefix: '', - flowChars: { start: '{', end: '}' }, - itemIndent: ctx.indent || '', - onChompKeep, - onComment - }); - } -} - -exports.YAMLMap = YAMLMap; -exports.findPair = findPair; diff --git a/bin/node_modules/yaml/dist/nodes/YAMLSeq.d.ts b/bin/node_modules/yaml/dist/nodes/YAMLSeq.d.ts deleted file mode 100644 index 47fe24d..0000000 --- a/bin/node_modules/yaml/dist/nodes/YAMLSeq.d.ts +++ /dev/null @@ -1,60 +0,0 @@ -import { CreateNodeContext } from '../doc/createNode.js'; -import type { BlockSequence, FlowCollection } from '../parse/cst.js'; -import type { Schema } from '../schema/Schema.js'; -import type { StringifyContext } from '../stringify/stringify.js'; -import { Collection } from './Collection.js'; -import type { ParsedNode, Range } from './Node.js'; -import type { Pair } from './Pair.js'; -import { Scalar } from './Scalar.js'; -import { ToJSContext } from './toJS.js'; -export declare namespace YAMLSeq { - interface Parsed = ParsedNode> extends YAMLSeq { - items: T[]; - range: Range; - srcToken?: BlockSequence | FlowCollection; - } -} -export declare class YAMLSeq extends Collection { - static get tagName(): 'tag:yaml.org,2002:seq'; - items: T[]; - constructor(schema?: Schema); - add(value: T): void; - /** - * Removes a value from the collection. - * - * `key` must contain a representation of an integer for this to succeed. - * It may be wrapped in a `Scalar`. - * - * @returns `true` if the item was found and removed. 
- */ - delete(key: unknown): boolean; - /** - * Returns item at `key`, or `undefined` if not found. By default unwraps - * scalar values from their surrounding node; to disable set `keepScalar` to - * `true` (collections are always returned intact). - * - * `key` must contain a representation of an integer for this to succeed. - * It may be wrapped in a `Scalar`. - */ - get(key: unknown, keepScalar: true): Scalar | undefined; - get(key: unknown, keepScalar?: false): T | undefined; - get(key: unknown, keepScalar?: boolean): T | Scalar | undefined; - /** - * Checks if the collection includes a value with the key `key`. - * - * `key` must contain a representation of an integer for this to succeed. - * It may be wrapped in a `Scalar`. - */ - has(key: unknown): boolean; - /** - * Sets a value in this collection. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - * - * If `key` does not contain a representation of an integer, this will throw. - * It may be wrapped in a `Scalar`. - */ - set(key: unknown, value: T): void; - toJSON(_?: unknown, ctx?: ToJSContext): unknown[]; - toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string; - static from(schema: Schema, obj: unknown, ctx: CreateNodeContext): YAMLSeq; -} diff --git a/bin/node_modules/yaml/dist/nodes/YAMLSeq.js b/bin/node_modules/yaml/dist/nodes/YAMLSeq.js deleted file mode 100644 index a2af086..0000000 --- a/bin/node_modules/yaml/dist/nodes/YAMLSeq.js +++ /dev/null @@ -1,115 +0,0 @@ -'use strict'; - -var createNode = require('../doc/createNode.js'); -var stringifyCollection = require('../stringify/stringifyCollection.js'); -var Collection = require('./Collection.js'); -var identity = require('./identity.js'); -var Scalar = require('./Scalar.js'); -var toJS = require('./toJS.js'); - -class YAMLSeq extends Collection.Collection { - static get tagName() { - return 'tag:yaml.org,2002:seq'; - } - constructor(schema) { - super(identity.SEQ, schema); - this.items = []; - } - add(value) { - this.items.push(value); - } - /** - * Removes a value from the collection. - * - * `key` must contain a representation of an integer for this to succeed. - * It may be wrapped in a `Scalar`. - * - * @returns `true` if the item was found and removed. - */ - delete(key) { - const idx = asItemIndex(key); - if (typeof idx !== 'number') - return false; - const del = this.items.splice(idx, 1); - return del.length > 0; - } - get(key, keepScalar) { - const idx = asItemIndex(key); - if (typeof idx !== 'number') - return undefined; - const it = this.items[idx]; - return !keepScalar && identity.isScalar(it) ? it.value : it; - } - /** - * Checks if the collection includes a value with the key `key`. - * - * `key` must contain a representation of an integer for this to succeed. - * It may be wrapped in a `Scalar`. - */ - has(key) { - const idx = asItemIndex(key); - return typeof idx === 'number' && idx < this.items.length; - } - /** - * Sets a value in this collection. For `!!set`, `value` needs to be a - * boolean to add/remove the item from the set. - * - * If `key` does not contain a representation of an integer, this will throw. - * It may be wrapped in a `Scalar`. 
- */ - set(key, value) { - const idx = asItemIndex(key); - if (typeof idx !== 'number') - throw new Error(`Expected a valid index, not ${key}.`); - const prev = this.items[idx]; - if (identity.isScalar(prev) && Scalar.isScalarValue(value)) - prev.value = value; - else - this.items[idx] = value; - } - toJSON(_, ctx) { - const seq = []; - if (ctx?.onCreate) - ctx.onCreate(seq); - let i = 0; - for (const item of this.items) - seq.push(toJS.toJS(item, String(i++), ctx)); - return seq; - } - toString(ctx, onComment, onChompKeep) { - if (!ctx) - return JSON.stringify(this); - return stringifyCollection.stringifyCollection(this, ctx, { - blockItemPrefix: '- ', - flowChars: { start: '[', end: ']' }, - itemIndent: (ctx.indent || '') + ' ', - onChompKeep, - onComment - }); - } - static from(schema, obj, ctx) { - const { replacer } = ctx; - const seq = new this(schema); - if (obj && Symbol.iterator in Object(obj)) { - let i = 0; - for (let it of obj) { - if (typeof replacer === 'function') { - const key = obj instanceof Set ? it : String(i++); - it = replacer.call(obj, key, it); - } - seq.items.push(createNode.createNode(it, undefined, ctx)); - } - } - return seq; - } -} -function asItemIndex(key) { - let idx = identity.isScalar(key) ? key.value : key; - if (idx && typeof idx === 'string') - idx = Number(idx); - return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0 - ? idx - : null; -} - -exports.YAMLSeq = YAMLSeq; diff --git a/bin/node_modules/yaml/dist/nodes/addPairToJSMap.d.ts b/bin/node_modules/yaml/dist/nodes/addPairToJSMap.d.ts deleted file mode 100644 index 70d9e62..0000000 --- a/bin/node_modules/yaml/dist/nodes/addPairToJSMap.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { Pair } from './Pair.js'; -import { ToJSContext } from './toJS.js'; -import type { MapLike } from './YAMLMap.js'; -export declare function addPairToJSMap(ctx: ToJSContext | undefined, map: MapLike, { key, value }: Pair): MapLike; diff --git a/bin/node_modules/yaml/dist/nodes/addPairToJSMap.js b/bin/node_modules/yaml/dist/nodes/addPairToJSMap.js deleted file mode 100644 index 30f6f95..0000000 --- a/bin/node_modules/yaml/dist/nodes/addPairToJSMap.js +++ /dev/null @@ -1,106 +0,0 @@ -'use strict'; - -var log = require('../log.js'); -var stringify = require('../stringify/stringify.js'); -var identity = require('./identity.js'); -var Scalar = require('./Scalar.js'); -var toJS = require('./toJS.js'); - -const MERGE_KEY = '<<'; -function addPairToJSMap(ctx, map, { key, value }) { - if (ctx?.doc.schema.merge && isMergeKey(key)) { - value = identity.isAlias(value) ? 
value.resolve(ctx.doc) : value; - if (identity.isSeq(value)) - for (const it of value.items) - mergeToJSMap(ctx, map, it); - else if (Array.isArray(value)) - for (const it of value) - mergeToJSMap(ctx, map, it); - else - mergeToJSMap(ctx, map, value); - } - else { - const jsKey = toJS.toJS(key, '', ctx); - if (map instanceof Map) { - map.set(jsKey, toJS.toJS(value, jsKey, ctx)); - } - else if (map instanceof Set) { - map.add(jsKey); - } - else { - const stringKey = stringifyKey(key, jsKey, ctx); - const jsValue = toJS.toJS(value, stringKey, ctx); - if (stringKey in map) - Object.defineProperty(map, stringKey, { - value: jsValue, - writable: true, - enumerable: true, - configurable: true - }); - else - map[stringKey] = jsValue; - } - } - return map; -} -const isMergeKey = (key) => key === MERGE_KEY || - (identity.isScalar(key) && - key.value === MERGE_KEY && - (!key.type || key.type === Scalar.Scalar.PLAIN)); -// If the value associated with a merge key is a single mapping node, each of -// its key/value pairs is inserted into the current mapping, unless the key -// already exists in it. If the value associated with the merge key is a -// sequence, then this sequence is expected to contain mapping nodes and each -// of these nodes is merged in turn according to its order in the sequence. -// Keys in mapping nodes earlier in the sequence override keys specified in -// later mapping nodes. -- http://yaml.org/type/merge.html -function mergeToJSMap(ctx, map, value) { - const source = ctx && identity.isAlias(value) ? value.resolve(ctx.doc) : value; - if (!identity.isMap(source)) - throw new Error('Merge sources must be maps or map aliases'); - const srcMap = source.toJSON(null, ctx, Map); - for (const [key, value] of srcMap) { - if (map instanceof Map) { - if (!map.has(key)) - map.set(key, value); - } - else if (map instanceof Set) { - map.add(key); - } - else if (!Object.prototype.hasOwnProperty.call(map, key)) { - Object.defineProperty(map, key, { - value, - writable: true, - enumerable: true, - configurable: true - }); - } - } - return map; -} -function stringifyKey(key, jsKey, ctx) { - if (jsKey === null) - return ''; - if (typeof jsKey !== 'object') - return String(jsKey); - if (identity.isNode(key) && ctx?.doc) { - const strCtx = stringify.createStringifyContext(ctx.doc, {}); - strCtx.anchors = new Set(); - for (const node of ctx.anchors.keys()) - strCtx.anchors.add(node.anchor); - strCtx.inFlow = true; - strCtx.inStringifyKey = true; - const strKey = key.toString(strCtx); - if (!ctx.mapKeyWarned) { - let jsonStr = JSON.stringify(strKey); - if (jsonStr.length > 40) - jsonStr = jsonStr.substring(0, 36) + '..."'; - log.warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. 
Set mapAsMap: true to use object keys.`); - ctx.mapKeyWarned = true; - } - return strKey; - } - return JSON.stringify(jsKey); -} - -exports.addPairToJSMap = addPairToJSMap; diff --git a/bin/node_modules/yaml/dist/nodes/identity.d.ts b/bin/node_modules/yaml/dist/nodes/identity.d.ts deleted file mode 100644 index c12fdf7..0000000 --- a/bin/node_modules/yaml/dist/nodes/identity.d.ts +++ /dev/null @@ -1,23 +0,0 @@ -import type { Document } from '../doc/Document.js'; -import type { Alias } from './Alias.js'; -import type { Node } from './Node.js'; -import type { Pair } from './Pair.js'; -import type { Scalar } from './Scalar.js'; -import type { YAMLMap } from './YAMLMap.js'; -import type { YAMLSeq } from './YAMLSeq.js'; -export declare const ALIAS: unique symbol; -export declare const DOC: unique symbol; -export declare const MAP: unique symbol; -export declare const PAIR: unique symbol; -export declare const SCALAR: unique symbol; -export declare const SEQ: unique symbol; -export declare const NODE_TYPE: unique symbol; -export declare const isAlias: (node: any) => node is Alias; -export declare const isDocument: (node: any) => node is Document; -export declare const isMap: (node: any) => node is YAMLMap; -export declare const isPair: (node: any) => node is Pair; -export declare const isScalar: (node: any) => node is Scalar; -export declare const isSeq: (node: any) => node is YAMLSeq; -export declare function isCollection(node: any): node is YAMLMap | YAMLSeq; -export declare function isNode(node: any): node is Node; -export declare const hasAnchor: (node: unknown) => node is Scalar | YAMLMap | YAMLSeq; diff --git a/bin/node_modules/yaml/dist/nodes/identity.js b/bin/node_modules/yaml/dist/nodes/identity.js deleted file mode 100644 index 5794aa3..0000000 --- a/bin/node_modules/yaml/dist/nodes/identity.js +++ /dev/null @@ -1,53 +0,0 @@ -'use strict'; - -const ALIAS = Symbol.for('yaml.alias'); -const DOC = Symbol.for('yaml.document'); -const MAP = Symbol.for('yaml.map'); -const PAIR = Symbol.for('yaml.pair'); -const SCALAR = Symbol.for('yaml.scalar'); -const SEQ = Symbol.for('yaml.seq'); -const NODE_TYPE = Symbol.for('yaml.node.type'); -const isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS; -const isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC; -const isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP; -const isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR; -const isScalar = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR; -const isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ; -function isCollection(node) { - if (node && typeof node === 'object') - switch (node[NODE_TYPE]) { - case MAP: - case SEQ: - return true; - } - return false; -} -function isNode(node) { - if (node && typeof node === 'object') - switch (node[NODE_TYPE]) { - case ALIAS: - case MAP: - case SCALAR: - case SEQ: - return true; - } - return false; -} -const hasAnchor = (node) => (isScalar(node) || isCollection(node)) && !!node.anchor; - -exports.ALIAS = ALIAS; -exports.DOC = DOC; -exports.MAP = MAP; -exports.NODE_TYPE = NODE_TYPE; -exports.PAIR = PAIR; -exports.SCALAR = SCALAR; -exports.SEQ = SEQ; -exports.hasAnchor = hasAnchor; -exports.isAlias = isAlias; -exports.isCollection = isCollection; -exports.isDocument = isDocument; -exports.isMap = isMap; -exports.isNode = isNode; -exports.isPair = isPair; -exports.isScalar = isScalar; 
-exports.isSeq = isSeq; diff --git a/bin/node_modules/yaml/dist/nodes/toJS.d.ts b/bin/node_modules/yaml/dist/nodes/toJS.d.ts deleted file mode 100644 index dcd0642..0000000 --- a/bin/node_modules/yaml/dist/nodes/toJS.d.ts +++ /dev/null @@ -1,27 +0,0 @@ -import type { Document } from '../doc/Document.js'; -import type { Node } from './Node.js'; -export interface AnchorData { - aliasCount: number; - count: number; - res: unknown; -} -export interface ToJSContext { - anchors: Map; - doc: Document; - keep: boolean; - mapAsMap: boolean; - mapKeyWarned: boolean; - maxAliasCount: number; - onCreate?: (res: unknown) => void; -} -/** - * Recursively convert any node or its contents to native JavaScript - * - * @param value - The input value - * @param arg - If `value` defines a `toJSON()` method, use this - * as its first argument - * @param ctx - Conversion context, originally set in Document#toJS(). If - * `{ keep: true }` is not set, output should be suitable for JSON - * stringification. - */ -export declare function toJS(value: any, arg: string | null, ctx?: ToJSContext): any; diff --git a/bin/node_modules/yaml/dist/nodes/toJS.js b/bin/node_modules/yaml/dist/nodes/toJS.js deleted file mode 100644 index a012823..0000000 --- a/bin/node_modules/yaml/dist/nodes/toJS.js +++ /dev/null @@ -1,39 +0,0 @@ -'use strict'; - -var identity = require('./identity.js'); - -/** - * Recursively convert any node or its contents to native JavaScript - * - * @param value - The input value - * @param arg - If `value` defines a `toJSON()` method, use this - * as its first argument - * @param ctx - Conversion context, originally set in Document#toJS(). If - * `{ keep: true }` is not set, output should be suitable for JSON - * stringification. - */ -function toJS(value, arg, ctx) { - // eslint-disable-next-line @typescript-eslint/no-unsafe-return - if (Array.isArray(value)) - return value.map((v, i) => toJS(v, String(i), ctx)); - if (value && typeof value.toJSON === 'function') { - // eslint-disable-next-line @typescript-eslint/no-unsafe-call - if (!ctx || !identity.hasAnchor(value)) - return value.toJSON(arg, ctx); - const data = { aliasCount: 0, count: 1, res: undefined }; - ctx.anchors.set(value, data); - ctx.onCreate = res => { - data.res = res; - delete ctx.onCreate; - }; - const res = value.toJSON(arg, ctx); - if (ctx.onCreate) - ctx.onCreate(res); - return res; - } - if (typeof value === 'bigint' && !ctx?.keep) - return Number(value); - return value; -} - -exports.toJS = toJS; diff --git a/bin/node_modules/yaml/dist/options.d.ts b/bin/node_modules/yaml/dist/options.d.ts deleted file mode 100644 index 85bec7e..0000000 --- a/bin/node_modules/yaml/dist/options.d.ts +++ /dev/null @@ -1,338 +0,0 @@ -import type { Reviver } from './doc/applyReviver.js'; -import type { Directives } from './doc/directives.js'; -import type { LogLevelId } from './log.js'; -import type { ParsedNode } from './nodes/Node.js'; -import type { Pair } from './nodes/Pair.js'; -import type { Scalar } from './nodes/Scalar.js'; -import type { LineCounter } from './parse/line-counter.js'; -import type { Schema } from './schema/Schema.js'; -import type { Tags } from './schema/tags.js'; -import type { CollectionTag, ScalarTag } from './schema/types.js'; -export type ParseOptions = { - /** - * Whether integers should be parsed into BigInt rather than number values. 
- * - * Default: `false` - * - * https://developer.mozilla.org/en/docs/Web/JavaScript/Reference/Global_Objects/BigInt - */ - intAsBigInt?: boolean; - /** - * Include a `srcToken` value on each parsed `Node`, containing the CST token - * that was composed into this node. - * - * Default: `false` - */ - keepSourceTokens?: boolean; - /** - * If set, newlines will be tracked, to allow for `lineCounter.linePos(offset)` - * to provide the `{ line, col }` positions within the input. - */ - lineCounter?: LineCounter; - /** - * Include line/col position & node type directly in parse errors. - * - * Default: `true` - */ - prettyErrors?: boolean; - /** - * Detect and report errors that are required by the YAML 1.2 spec, - * but are caused by unambiguous content. - * - * Default: `true` - */ - strict?: boolean; - /** - * YAML requires map keys to be unique. By default, this is checked by - * comparing scalar values with `===`; deep equality is not checked for - * aliases or collections. If merge keys are enabled by the schema, - * multiple `<<` keys are allowed. - * - * Set `false` to disable, or provide your own comparator function to - * customise. The comparator will be passed two `ParsedNode` values, and - * is expected to return a `boolean` indicating their equality. - * - * Default: `true` - */ - uniqueKeys?: boolean | ((a: ParsedNode, b: ParsedNode) => boolean); -}; -export type DocumentOptions = { - /** - * @internal - * Used internally by Composer. If set and includes an explicit version, - * that overrides the `version` option. - */ - _directives?: Directives; - /** - * Control the logging level during parsing - * - * Default: `'warn'` - */ - logLevel?: LogLevelId; - /** - * The YAML version used by documents without a `%YAML` directive. - * - * Default: `"1.2"` - */ - version?: '1.1' | '1.2' | 'next'; -}; -export type SchemaOptions = { - /** - * When parsing, warn about compatibility issues with the given schema. - * When stringifying, use scalar styles that are parsed correctly - * by the `compat` schema as well as the actual schema. - * - * Default: `null` - */ - compat?: string | Tags | null; - /** - * Array of additional tags to include in the schema, or a function that may - * modify the schema's base tag array. - */ - customTags?: Tags | ((tags: Tags) => Tags) | null; - /** - * Enable support for `<<` merge keys. - * - * Default: `false` for YAML 1.2, `true` for earlier versions - */ - merge?: boolean; - /** - * When using the `'core'` schema, support parsing values with these - * explicit YAML 1.1 tags: - * - * `!!binary`, `!!omap`, `!!pairs`, `!!set`, `!!timestamp`. - * - * Default `true` - */ - resolveKnownTags?: boolean; - /** - * The base schema to use. - * - * The core library has built-in support for the following: - * - `'failsafe'`: A minimal schema that parses all scalars as strings - * - `'core'`: The YAML 1.2 core schema - * - `'json'`: The YAML 1.2 JSON schema, with minimal rules for JSON compatibility - * - `'yaml-1.1'`: The YAML 1.1 schema - * - * If using another (custom) schema, the `customTags` array needs to - * fully define the schema's tags. - * - * Default: `'core'` for YAML 1.2, `'yaml-1.1'` for earlier versions - */ - schema?: string | Schema; - /** - * When adding to or stringifying a map, sort the entries. - * If `true`, sort by comparing key values with `<`. - * Does not affect item order when parsing. - * - * Default: `false` - */ - sortMapEntries?: boolean | ((a: Pair, b: Pair) => number); - /** - * Override default values for `toString()` options. 
- */ - toStringDefaults?: ToStringOptions; -}; -export type CreateNodeOptions = { - /** - * During node construction, use anchors and aliases to keep strictly equal - * non-null objects as equivalent in YAML. - * - * Default: `true` - */ - aliasDuplicateObjects?: boolean; - /** - * Default prefix for anchors. - * - * Default: `'a'`, resulting in anchors `a1`, `a2`, etc. - */ - anchorPrefix?: string; - /** Force the top-level collection node to use flow style. */ - flow?: boolean; - /** - * Keep `undefined` object values when creating mappings, rather than - * discarding them. - * - * Default: `false` - */ - keepUndefined?: boolean | null; - onTagObj?: (tagObj: ScalarTag | CollectionTag) => void; - /** - * Specify the top-level collection type, e.g. `"!!omap"`. Note that this - * requires the corresponding tag to be available in this document's schema. - */ - tag?: string; -}; -export type ToJSOptions = { - /** - * Use Map rather than Object to represent mappings. - * - * Default: `false` - */ - mapAsMap?: boolean; - /** - * Prevent exponential entity expansion attacks by limiting data aliasing count; - * set to `-1` to disable checks; `0` disallows all alias nodes. - * - * Default: `100` - */ - maxAliasCount?: number; - /** - * If defined, called with the resolved `value` and reference `count` for - * each anchor in the document. - */ - onAnchor?: (value: unknown, count: number) => void; - /** - * Optional function that may filter or modify the output JS value - * - * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse#using_the_reviver_parameter - */ - reviver?: Reviver; -}; -export type ToStringOptions = { - /** - * Use block quote styles for scalar values where applicable. - * Set to `false` to disable block quotes completely. - * - * Default: `true` - */ - blockQuote?: boolean | 'folded' | 'literal'; - /** - * Enforce `'block'` or `'flow'` style on maps and sequences. - * Empty collections will always be stringified as `{}` or `[]`. - * - * Default: `'any'`, allowing each node to set its style separately - * with its `flow: boolean` (default `false`) property. - */ - collectionStyle?: 'any' | 'block' | 'flow'; - /** - * Comment stringifier. - * Output should be valid for the current schema. - * - * By default, empty comment lines are left empty, - * lines consisting of a single space are replaced by `#`, - * and all other lines are prefixed with a `#`. - */ - commentString?: (comment: string) => string; - /** - * The default type of string literal used to stringify implicit key values. - * Output may use other types if required to fully represent the value. - * - * If `null`, the value of `defaultStringType` is used. - * - * Default: `null` - */ - defaultKeyType?: Scalar.Type | null; - /** - * The default type of string literal used to stringify values in general. - * Output may use other types if required to fully represent the value. - * - * Default: `'PLAIN'` - */ - defaultStringType?: Scalar.Type; - /** - * Include directives in the output. - * - * - If `true`, at least the document-start marker `---` is always included. - * This does not force the `%YAML` directive to be included. To do that, - * set `doc.directives.yaml.explicit = true`. - * - If `false`, no directives or marker is ever included. If using the `%TAG` - * directive, you are expected to include it manually in the stream before - * its use. - * - If `null`, directives and marker may be included if required. 
- * - * Default: `null` - */ - directives?: boolean | null; - /** - * Restrict double-quoted strings to use JSON-compatible syntax. - * - * Default: `false` - */ - doubleQuotedAsJSON?: boolean; - /** - * Minimum length for double-quoted strings to use multiple lines to - * represent the value. Ignored if `doubleQuotedAsJSON` is set. - * - * Default: `40` - */ - doubleQuotedMinMultiLineLength?: number; - /** - * String representation for `false`. - * With the core schema, use `'false'`, `'False'`, or `'FALSE'`. - * - * Default: `'false'` - */ - falseStr?: string; - /** - * When true, a single space of padding will be added inside the delimiters - * of non-empty single-line flow collections. - * - * Default: `true` - */ - flowCollectionPadding?: boolean; - /** - * The number of spaces to use when indenting code. - * - * Default: `2` - */ - indent?: number; - /** - * Whether block sequences should be indented. - * - * Default: `true` - */ - indentSeq?: boolean; - /** - * Maximum line width (set to `0` to disable folding). - * - * This is a soft limit, as only double-quoted semantics allow for inserting - * a line break in the middle of a word, as well as being influenced by the - * `minContentWidth` option. - * - * Default: `80` - */ - lineWidth?: number; - /** - * Minimum line width for highly-indented content (set to `0` to disable). - * - * Default: `20` - */ - minContentWidth?: number; - /** - * String representation for `null`. - * With the core schema, use `'null'`, `'Null'`, `'NULL'`, `'~'`, or an empty - * string `''`. - * - * Default: `'null'` - */ - nullStr?: string; - /** - * Require keys to be scalars and to use implicit rather than explicit notation. - * - * Default: `false` - */ - simpleKeys?: boolean; - /** - * Use 'single quote' rather than "double quote" where applicable. - * Set to `false` to disable single quotes completely. - * - * Default: `null` - */ - singleQuote?: boolean | null; - /** - * String representation for `true`. - * With the core schema, use `'true'`, `'True'`, or `'TRUE'`. - * - * Default: `'true'` - */ - trueStr?: string; - /** - * The anchor used by an alias must be defined before the alias node. As it's - * possible for the document to be modified manually, the order may be - * verified during stringification. - * - * Default: `'true'` - */ - verifyAliasOrder?: boolean; -}; diff --git a/bin/node_modules/yaml/dist/parse/cst-scalar.d.ts b/bin/node_modules/yaml/dist/parse/cst-scalar.d.ts deleted file mode 100644 index a7bd1d6..0000000 --- a/bin/node_modules/yaml/dist/parse/cst-scalar.d.ts +++ /dev/null @@ -1,64 +0,0 @@ -import { ErrorCode } from '../errors.js'; -import { Range } from '../nodes/Node.js'; -import type { Scalar } from '../nodes/Scalar.js'; -import type { BlockScalar, FlowScalar, SourceToken, Token } from './cst.js'; -/** - * If `token` is a CST flow or block scalar, determine its string value and a few other attributes. - * Otherwise, return `null`. 
- */ -export declare function resolveAsScalar(token: FlowScalar | BlockScalar, strict?: boolean, onError?: (offset: number, code: ErrorCode, message: string) => void): { - value: string; - type: Scalar.Type | null; - comment: string; - range: Range; -}; -export declare function resolveAsScalar(token: Token | null | undefined, strict?: boolean, onError?: (offset: number, code: ErrorCode, message: string) => void): { - value: string; - type: Scalar.Type | null; - comment: string; - range: Range; -} | null; -/** - * Create a new scalar token with `value` - * - * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, - * as this function does not support any schema operations and won't check for such conflicts. - * - * @param value The string representation of the value, which will have its content properly indented. - * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added. - * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. - * @param context.indent The indent level of the token. - * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value. - * @param context.offset The offset position of the token. - * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. - */ -export declare function createScalarToken(value: string, context: { - end?: SourceToken[]; - implicitKey?: boolean; - indent: number; - inFlow?: boolean; - offset?: number; - type?: Scalar.Type; -}): BlockScalar | FlowScalar; -/** - * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have. - * - * Best efforts are made to retain any comments previously associated with the `token`, - * though all contents within a collection's `items` will be overwritten. - * - * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, - * as this function does not support any schema operations and won't check for such conflicts. - * - * @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key. - * @param value The string representation of the value, which will have its content properly indented. - * @param context.afterKey In most cases, values after a key should have an additional level of indentation. - * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. - * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value. - * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. 
- */ -export declare function setScalarValue(token: Token, value: string, context?: { - afterKey?: boolean; - implicitKey?: boolean; - inFlow?: boolean; - type?: Scalar.Type; -}): void; diff --git a/bin/node_modules/yaml/dist/parse/cst-scalar.js b/bin/node_modules/yaml/dist/parse/cst-scalar.js deleted file mode 100644 index 11c5fcc..0000000 --- a/bin/node_modules/yaml/dist/parse/cst-scalar.js +++ /dev/null @@ -1,218 +0,0 @@ -'use strict'; - -var resolveBlockScalar = require('../compose/resolve-block-scalar.js'); -var resolveFlowScalar = require('../compose/resolve-flow-scalar.js'); -var errors = require('../errors.js'); -var stringifyString = require('../stringify/stringifyString.js'); - -function resolveAsScalar(token, strict = true, onError) { - if (token) { - const _onError = (pos, code, message) => { - const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? pos[0] : pos.offset; - if (onError) - onError(offset, code, message); - else - throw new errors.YAMLParseError([offset, offset + 1], code, message); - }; - switch (token.type) { - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - return resolveFlowScalar.resolveFlowScalar(token, strict, _onError); - case 'block-scalar': - return resolveBlockScalar.resolveBlockScalar(token, strict, _onError); - } - } - return null; -} -/** - * Create a new scalar token with `value` - * - * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, - * as this function does not support any schema operations and won't check for such conflicts. - * - * @param value The string representation of the value, which will have its content properly indented. - * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added. - * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. - * @param context.indent The indent level of the token. - * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value. - * @param context.offset The offset position of the token. - * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. - */ -function createScalarToken(value, context) { - const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context; - const source = stringifyString.stringifyString({ type, value }, { - implicitKey, - indent: indent > 0 ? ' '.repeat(indent) : '', - inFlow, - options: { blockQuote: true, lineWidth: -1 } - }); - const end = context.end ?? 
[ - { type: 'newline', offset: -1, indent, source: '\n' } - ]; - switch (source[0]) { - case '|': - case '>': { - const he = source.indexOf('\n'); - const head = source.substring(0, he); - const body = source.substring(he + 1) + '\n'; - const props = [ - { type: 'block-scalar-header', offset, indent, source: head } - ]; - if (!addEndtoBlockProps(props, end)) - props.push({ type: 'newline', offset: -1, indent, source: '\n' }); - return { type: 'block-scalar', offset, indent, props, source: body }; - } - case '"': - return { type: 'double-quoted-scalar', offset, indent, source, end }; - case "'": - return { type: 'single-quoted-scalar', offset, indent, source, end }; - default: - return { type: 'scalar', offset, indent, source, end }; - } -} -/** - * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have. - * - * Best efforts are made to retain any comments previously associated with the `token`, - * though all contents within a collection's `items` will be overwritten. - * - * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, - * as this function does not support any schema operations and won't check for such conflicts. - * - * @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key. - * @param value The string representation of the value, which will have its content properly indented. - * @param context.afterKey In most cases, values after a key should have an additional level of indentation. - * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. - * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value. - * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. - */ -function setScalarValue(token, value, context = {}) { - let { afterKey = false, implicitKey = false, inFlow = false, type } = context; - let indent = 'indent' in token ? token.indent : null; - if (afterKey && typeof indent === 'number') - indent += 2; - if (!type) - switch (token.type) { - case 'single-quoted-scalar': - type = 'QUOTE_SINGLE'; - break; - case 'double-quoted-scalar': - type = 'QUOTE_DOUBLE'; - break; - case 'block-scalar': { - const header = token.props[0]; - if (header.type !== 'block-scalar-header') - throw new Error('Invalid block scalar header'); - type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL'; - break; - } - default: - type = 'PLAIN'; - } - const source = stringifyString.stringifyString({ type, value }, { - implicitKey: implicitKey || indent === null, - indent: indent !== null && indent > 0 ? 
' '.repeat(indent) : '', - inFlow, - options: { blockQuote: true, lineWidth: -1 } - }); - switch (source[0]) { - case '|': - case '>': - setBlockScalarValue(token, source); - break; - case '"': - setFlowScalarValue(token, source, 'double-quoted-scalar'); - break; - case "'": - setFlowScalarValue(token, source, 'single-quoted-scalar'); - break; - default: - setFlowScalarValue(token, source, 'scalar'); - } -} -function setBlockScalarValue(token, source) { - const he = source.indexOf('\n'); - const head = source.substring(0, he); - const body = source.substring(he + 1) + '\n'; - if (token.type === 'block-scalar') { - const header = token.props[0]; - if (header.type !== 'block-scalar-header') - throw new Error('Invalid block scalar header'); - header.source = head; - token.source = body; - } - else { - const { offset } = token; - const indent = 'indent' in token ? token.indent : -1; - const props = [ - { type: 'block-scalar-header', offset, indent, source: head } - ]; - if (!addEndtoBlockProps(props, 'end' in token ? token.end : undefined)) - props.push({ type: 'newline', offset: -1, indent, source: '\n' }); - for (const key of Object.keys(token)) - if (key !== 'type' && key !== 'offset') - delete token[key]; - Object.assign(token, { type: 'block-scalar', indent, props, source: body }); - } -} -/** @returns `true` if last token is a newline */ -function addEndtoBlockProps(props, end) { - if (end) - for (const st of end) - switch (st.type) { - case 'space': - case 'comment': - props.push(st); - break; - case 'newline': - props.push(st); - return true; - } - return false; -} -function setFlowScalarValue(token, source, type) { - switch (token.type) { - case 'scalar': - case 'double-quoted-scalar': - case 'single-quoted-scalar': - token.type = type; - token.source = source; - break; - case 'block-scalar': { - const end = token.props.slice(1); - let oa = source.length; - if (token.props[0].type === 'block-scalar-header') - oa -= token.props[0].source.length; - for (const tok of end) - tok.offset += oa; - delete token.props; - Object.assign(token, { type, source, end }); - break; - } - case 'block-map': - case 'block-seq': { - const offset = token.offset + source.length; - const nl = { type: 'newline', offset, indent: token.indent, source: '\n' }; - delete token.items; - Object.assign(token, { type, source, end: [nl] }); - break; - } - default: { - const indent = 'indent' in token ? token.indent : -1; - const end = 'end' in token && Array.isArray(token.end) - ? token.end.filter(st => st.type === 'space' || - st.type === 'comment' || - st.type === 'newline') - : []; - for (const key of Object.keys(token)) - if (key !== 'type' && key !== 'offset') - delete token[key]; - Object.assign(token, { type, indent, source, end }); - } - } -} - -exports.createScalarToken = createScalarToken; -exports.resolveAsScalar = resolveAsScalar; -exports.setScalarValue = setScalarValue; diff --git a/bin/node_modules/yaml/dist/parse/cst-stringify.d.ts b/bin/node_modules/yaml/dist/parse/cst-stringify.d.ts deleted file mode 100644 index dbf66d6..0000000 --- a/bin/node_modules/yaml/dist/parse/cst-stringify.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -import type { CollectionItem, Token } from './cst.js'; -/** - * Stringify a CST document, token, or collection item - * - * Fair warning: This applies no validation whatsoever, and - * simply concatenates the sources in their logical order. 
- */ -export declare const stringify: (cst: Token | CollectionItem) => string; diff --git a/bin/node_modules/yaml/dist/parse/cst-stringify.js b/bin/node_modules/yaml/dist/parse/cst-stringify.js deleted file mode 100644 index 78e8c37..0000000 --- a/bin/node_modules/yaml/dist/parse/cst-stringify.js +++ /dev/null @@ -1,63 +0,0 @@ -'use strict'; - -/** - * Stringify a CST document, token, or collection item - * - * Fair warning: This applies no validation whatsoever, and - * simply concatenates the sources in their logical order. - */ -const stringify = (cst) => 'type' in cst ? stringifyToken(cst) : stringifyItem(cst); -function stringifyToken(token) { - switch (token.type) { - case 'block-scalar': { - let res = ''; - for (const tok of token.props) - res += stringifyToken(tok); - return res + token.source; - } - case 'block-map': - case 'block-seq': { - let res = ''; - for (const item of token.items) - res += stringifyItem(item); - return res; - } - case 'flow-collection': { - let res = token.start.source; - for (const item of token.items) - res += stringifyItem(item); - for (const st of token.end) - res += st.source; - return res; - } - case 'document': { - let res = stringifyItem(token); - if (token.end) - for (const st of token.end) - res += st.source; - return res; - } - default: { - let res = token.source; - if ('end' in token && token.end) - for (const st of token.end) - res += st.source; - return res; - } - } -} -function stringifyItem({ start, key, sep, value }) { - let res = ''; - for (const st of start) - res += st.source; - if (key) - res += stringifyToken(key); - if (sep) - for (const st of sep) - res += st.source; - if (value) - res += stringifyToken(value); - return res; -} - -exports.stringify = stringify; diff --git a/bin/node_modules/yaml/dist/parse/cst-visit.d.ts b/bin/node_modules/yaml/dist/parse/cst-visit.d.ts deleted file mode 100644 index 4f21f05..0000000 --- a/bin/node_modules/yaml/dist/parse/cst-visit.d.ts +++ /dev/null @@ -1,39 +0,0 @@ -import type { CollectionItem, Document } from './cst.js'; -export type VisitPath = readonly ['key' | 'value', number][]; -export type Visitor = (item: CollectionItem, path: VisitPath) => number | symbol | Visitor | void; -/** - * Apply a visitor to a CST document or item. - * - * Walks through the tree (depth-first) starting from the root, calling a - * `visitor` function with two arguments when entering each item: - * - `item`: The current item, which included the following members: - * - `start: SourceToken[]` – Source tokens before the key or value, - * possibly including its anchor or tag. - * - `key?: Token | null` – Set for pair values. May then be `null`, if - * the key before the `:` separator is empty. - * - `sep?: SourceToken[]` – Source tokens between the key and the value, - * which should include the `:` map value indicator if `value` is set. - * - `value?: Token` – The value of a sequence item, or of a map pair. - * - `path`: The steps from the root to the current node, as an array of - * `['key' | 'value', number]` tuples. - * - * The return value of the visitor may be used to control the traversal: - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this token, continue with - * next sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current item, then continue with the next one - * - `number`: Set the index of the next step. This is useful especially if - * the index of the current token has changed. 
- * - `function`: Define the next visitor for this item. After the original - * visitor is called on item entry, next visitors are called after handling - * a non-empty `key` and when exiting the item. - */ -export declare function visit(cst: Document | CollectionItem, visitor: Visitor): void; -export declare namespace visit { - var BREAK: symbol; - var SKIP: symbol; - var REMOVE: symbol; - var itemAtPath: (cst: Document | CollectionItem, path: VisitPath) => CollectionItem | undefined; - var parentCollection: (cst: Document | CollectionItem, path: VisitPath) => import("./cst.js").BlockMap | import("./cst.js").BlockSequence | import("./cst.js").FlowCollection; -} diff --git a/bin/node_modules/yaml/dist/parse/cst-visit.js b/bin/node_modules/yaml/dist/parse/cst-visit.js deleted file mode 100644 index 9ceee93..0000000 --- a/bin/node_modules/yaml/dist/parse/cst-visit.js +++ /dev/null @@ -1,99 +0,0 @@ -'use strict'; - -const BREAK = Symbol('break visit'); -const SKIP = Symbol('skip children'); -const REMOVE = Symbol('remove item'); -/** - * Apply a visitor to a CST document or item. - * - * Walks through the tree (depth-first) starting from the root, calling a - * `visitor` function with two arguments when entering each item: - * - `item`: The current item, which included the following members: - * - `start: SourceToken[]` – Source tokens before the key or value, - * possibly including its anchor or tag. - * - `key?: Token | null` – Set for pair values. May then be `null`, if - * the key before the `:` separator is empty. - * - `sep?: SourceToken[]` – Source tokens between the key and the value, - * which should include the `:` map value indicator if `value` is set. - * - `value?: Token` – The value of a sequence item, or of a map pair. - * - `path`: The steps from the root to the current node, as an array of - * `['key' | 'value', number]` tuples. - * - * The return value of the visitor may be used to control the traversal: - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this token, continue with - * next sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current item, then continue with the next one - * - `number`: Set the index of the next step. This is useful especially if - * the index of the current token has changed. - * - `function`: Define the next visitor for this item. After the original - * visitor is called on item entry, next visitors are called after handling - * a non-empty `key` and when exiting the item. - */ -function visit(cst, visitor) { - if ('type' in cst && cst.type === 'document') - cst = { start: cst.start, value: cst.value }; - _visit(Object.freeze([]), cst, visitor); -} -// Without the `as symbol` casts, TS declares these in the `visit` -// namespace using `var`, but then complains about that because -// `unique symbol` must be `const`. -/** Terminate visit traversal completely */ -visit.BREAK = BREAK; -/** Do not visit the children of the current item */ -visit.SKIP = SKIP; -/** Remove the current item */ -visit.REMOVE = REMOVE; -/** Find the item at `path` from `cst` as the root */ -visit.itemAtPath = (cst, path) => { - let item = cst; - for (const [field, index] of path) { - const tok = item?.[field]; - if (tok && 'items' in tok) { - item = tok.items[index]; - } - else - return undefined; - } - return item; -}; -/** - * Get the immediate parent collection of the item at `path` from `cst` as the root. 
- * - * Throws an error if the collection is not found, which should never happen if the item itself exists. - */ -visit.parentCollection = (cst, path) => { - const parent = visit.itemAtPath(cst, path.slice(0, -1)); - const field = path[path.length - 1][0]; - const coll = parent?.[field]; - if (coll && 'items' in coll) - return coll; - throw new Error('Parent collection not found'); -}; -function _visit(path, item, visitor) { - let ctrl = visitor(item, path); - if (typeof ctrl === 'symbol') - return ctrl; - for (const field of ['key', 'value']) { - const token = item[field]; - if (token && 'items' in token) { - for (let i = 0; i < token.items.length; ++i) { - const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor); - if (typeof ci === 'number') - i = ci - 1; - else if (ci === BREAK) - return BREAK; - else if (ci === REMOVE) { - token.items.splice(i, 1); - i -= 1; - } - } - if (typeof ctrl === 'function' && field === 'key') - ctrl = ctrl(item, path); - } - } - return typeof ctrl === 'function' ? ctrl(item, path) : ctrl; -} - -exports.visit = visit; diff --git a/bin/node_modules/yaml/dist/parse/cst.d.ts b/bin/node_modules/yaml/dist/parse/cst.d.ts deleted file mode 100644 index ec57ace..0000000 --- a/bin/node_modules/yaml/dist/parse/cst.d.ts +++ /dev/null @@ -1,106 +0,0 @@ -export { createScalarToken, resolveAsScalar, setScalarValue } from './cst-scalar.js'; -export { stringify } from './cst-stringify.js'; -export { visit, Visitor, VisitPath } from './cst-visit.js'; -export interface SourceToken { - type: 'byte-order-mark' | 'doc-mode' | 'doc-start' | 'space' | 'comment' | 'newline' | 'directive-line' | 'anchor' | 'tag' | 'seq-item-ind' | 'explicit-key-ind' | 'map-value-ind' | 'flow-map-start' | 'flow-map-end' | 'flow-seq-start' | 'flow-seq-end' | 'flow-error-end' | 'comma' | 'block-scalar-header'; - offset: number; - indent: number; - source: string; -} -export interface ErrorToken { - type: 'error'; - offset: number; - source: string; - message: string; -} -export interface Directive { - type: 'directive'; - offset: number; - source: string; -} -export interface Document { - type: 'document'; - offset: number; - start: SourceToken[]; - value?: Token; - end?: SourceToken[]; -} -export interface DocumentEnd { - type: 'doc-end'; - offset: number; - source: string; - end?: SourceToken[]; -} -export interface FlowScalar { - type: 'alias' | 'scalar' | 'single-quoted-scalar' | 'double-quoted-scalar'; - offset: number; - indent: number; - source: string; - end?: SourceToken[]; -} -export interface BlockScalar { - type: 'block-scalar'; - offset: number; - indent: number; - props: Token[]; - source: string; -} -export interface BlockMap { - type: 'block-map'; - offset: number; - indent: number; - items: Array<{ - start: SourceToken[]; - key?: never; - sep?: never; - value?: never; - } | { - start: SourceToken[]; - key: Token | null; - sep: SourceToken[]; - value?: Token; - }>; -} -export interface BlockSequence { - type: 'block-seq'; - offset: number; - indent: number; - items: Array<{ - start: SourceToken[]; - key?: never; - sep?: never; - value?: Token; - }>; -} -export type CollectionItem = { - start: SourceToken[]; - key?: Token | null; - sep?: SourceToken[]; - value?: Token; -}; -export interface FlowCollection { - type: 'flow-collection'; - offset: number; - indent: number; - start: SourceToken; - items: CollectionItem[]; - end: SourceToken[]; -} -export type Token = SourceToken | ErrorToken | Directive | Document | DocumentEnd | FlowScalar | BlockScalar | BlockMap 
| BlockSequence | FlowCollection; -export type TokenType = SourceToken['type'] | DocumentEnd['type'] | FlowScalar['type']; -/** The byte order mark */ -export declare const BOM = "\uFEFF"; -/** Start of doc-mode */ -export declare const DOCUMENT = "\u0002"; -/** Unexpected end of flow-mode */ -export declare const FLOW_END = "\u0018"; -/** Next token is a scalar value */ -export declare const SCALAR = "\u001F"; -/** @returns `true` if `token` is a flow or block collection */ -export declare const isCollection: (token: Token | null | undefined) => token is BlockMap | BlockSequence | FlowCollection; -/** @returns `true` if `token` is a flow or block scalar; not an alias */ -export declare const isScalar: (token: Token | null | undefined) => token is FlowScalar | BlockScalar; -/** Get a printable representation of a lexer token */ -export declare function prettyToken(token: string): string; -/** Identify the type of a lexer token. May return `null` for unknown tokens. */ -export declare function tokenType(source: string): TokenType | null; diff --git a/bin/node_modules/yaml/dist/parse/cst.js b/bin/node_modules/yaml/dist/parse/cst.js deleted file mode 100644 index 613c229..0000000 --- a/bin/node_modules/yaml/dist/parse/cst.js +++ /dev/null @@ -1,112 +0,0 @@ -'use strict'; - -var cstScalar = require('./cst-scalar.js'); -var cstStringify = require('./cst-stringify.js'); -var cstVisit = require('./cst-visit.js'); - -/** The byte order mark */ -const BOM = '\u{FEFF}'; -/** Start of doc-mode */ -const DOCUMENT = '\x02'; // C0: Start of Text -/** Unexpected end of flow-mode */ -const FLOW_END = '\x18'; // C0: Cancel -/** Next token is a scalar value */ -const SCALAR = '\x1f'; // C0: Unit Separator -/** @returns `true` if `token` is a flow or block collection */ -const isCollection = (token) => !!token && 'items' in token; -/** @returns `true` if `token` is a flow or block scalar; not an alias */ -const isScalar = (token) => !!token && - (token.type === 'scalar' || - token.type === 'single-quoted-scalar' || - token.type === 'double-quoted-scalar' || - token.type === 'block-scalar'); -/* istanbul ignore next */ -/** Get a printable representation of a lexer token */ -function prettyToken(token) { - switch (token) { - case BOM: - return ''; - case DOCUMENT: - return ''; - case FLOW_END: - return ''; - case SCALAR: - return ''; - default: - return JSON.stringify(token); - } -} -/** Identify the type of a lexer token. May return `null` for unknown tokens. 
*/ -function tokenType(source) { - switch (source) { - case BOM: - return 'byte-order-mark'; - case DOCUMENT: - return 'doc-mode'; - case FLOW_END: - return 'flow-error-end'; - case SCALAR: - return 'scalar'; - case '---': - return 'doc-start'; - case '...': - return 'doc-end'; - case '': - case '\n': - case '\r\n': - return 'newline'; - case '-': - return 'seq-item-ind'; - case '?': - return 'explicit-key-ind'; - case ':': - return 'map-value-ind'; - case '{': - return 'flow-map-start'; - case '}': - return 'flow-map-end'; - case '[': - return 'flow-seq-start'; - case ']': - return 'flow-seq-end'; - case ',': - return 'comma'; - } - switch (source[0]) { - case ' ': - case '\t': - return 'space'; - case '#': - return 'comment'; - case '%': - return 'directive-line'; - case '*': - return 'alias'; - case '&': - return 'anchor'; - case '!': - return 'tag'; - case "'": - return 'single-quoted-scalar'; - case '"': - return 'double-quoted-scalar'; - case '|': - case '>': - return 'block-scalar-header'; - } - return null; -} - -exports.createScalarToken = cstScalar.createScalarToken; -exports.resolveAsScalar = cstScalar.resolveAsScalar; -exports.setScalarValue = cstScalar.setScalarValue; -exports.stringify = cstStringify.stringify; -exports.visit = cstVisit.visit; -exports.BOM = BOM; -exports.DOCUMENT = DOCUMENT; -exports.FLOW_END = FLOW_END; -exports.SCALAR = SCALAR; -exports.isCollection = isCollection; -exports.isScalar = isScalar; -exports.prettyToken = prettyToken; -exports.tokenType = tokenType; diff --git a/bin/node_modules/yaml/dist/parse/lexer.d.ts b/bin/node_modules/yaml/dist/parse/lexer.d.ts deleted file mode 100644 index 238e7b5..0000000 --- a/bin/node_modules/yaml/dist/parse/lexer.d.ts +++ /dev/null @@ -1,87 +0,0 @@ -/** - * Splits an input string into lexical tokens, i.e. smaller strings that are - * easily identifiable by `tokens.tokenType()`. - * - * Lexing starts always in a "stream" context. Incomplete input may be buffered - * until a complete token can be emitted. - * - * In addition to slices of the original input, the following control characters - * may also be emitted: - * - * - `\x02` (Start of Text): A document starts with the next token - * - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error) - * - `\x1f` (Unit Separator): Next token is a scalar value - * - `\u{FEFF}` (Byte order mark): Emitted separately outside documents - */ -export declare class Lexer { - /** - * Flag indicating whether the end of the current buffer marks the end of - * all input - */ - private atEnd; - /** - * Explicit indent set in block scalar header, as an offset from the current - * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not - * explicitly set. - */ - private blockScalarIndent; - /** - * Block scalars that include a + (keep) chomping indicator in their header - * include trailing empty lines, which are otherwise excluded from the - * scalar's contents. - */ - private blockScalarKeep; - /** Current input */ - private buffer; - /** - * Flag noting whether the map value indicator : can immediately follow this - * node within a flow context. - */ - private flowKey; - /** Count of surrounding flow collection levels. */ - private flowLevel; - /** - * Minimum level of indentation required for next lines to be parsed as a - * part of the current scalar value. - */ - private indentNext; - /** Indentation level of the current line. */ - private indentValue; - /** Position of the next \n character. 
*/ - private lineEndPos; - /** Stores the state of the lexer if reaching the end of incpomplete input */ - private next; - /** A pointer to `buffer`; the current position of the lexer. */ - private pos; - /** - * Generate YAML tokens from the `source` string. If `incomplete`, - * a part of the last line may be left as a buffer for the next call. - * - * @returns A generator of lexical tokens - */ - lex(source: string, incomplete?: boolean): Generator; - private atLineEnd; - private charAt; - private continueScalar; - private getLine; - private hasChars; - private setNext; - private peek; - private parseNext; - private parseStream; - private parseLineStart; - private parseBlockStart; - private parseDocument; - private parseFlowCollection; - private parseQuotedScalar; - private parseBlockScalarHeader; - private parseBlockScalar; - private parsePlainScalar; - private pushCount; - private pushToIndex; - private pushIndicators; - private pushTag; - private pushNewline; - private pushSpaces; - private pushUntil; -} diff --git a/bin/node_modules/yaml/dist/parse/lexer.js b/bin/node_modules/yaml/dist/parse/lexer.js deleted file mode 100644 index 5112db7..0000000 --- a/bin/node_modules/yaml/dist/parse/lexer.js +++ /dev/null @@ -1,710 +0,0 @@ -'use strict'; - -var cst = require('./cst.js'); - -/* -START -> stream - -stream - directive -> line-end -> stream - indent + line-end -> stream - [else] -> line-start - -line-end - comment -> line-end - newline -> . - input-end -> END - -line-start - doc-start -> doc - doc-end -> stream - [else] -> indent -> block-start - -block-start - seq-item-start -> block-start - explicit-key-start -> block-start - map-value-start -> block-start - [else] -> doc - -doc - line-end -> line-start - spaces -> doc - anchor -> doc - tag -> doc - flow-start -> flow -> doc - flow-end -> error -> doc - seq-item-start -> error -> doc - explicit-key-start -> error -> doc - map-value-start -> doc - alias -> doc - quote-start -> quoted-scalar -> doc - block-scalar-header -> line-end -> block-scalar(min) -> line-start - [else] -> plain-scalar(false, min) -> doc - -flow - line-end -> flow - spaces -> flow - anchor -> flow - tag -> flow - flow-start -> flow -> flow - flow-end -> . - seq-item-start -> error -> flow - explicit-key-start -> flow - map-value-start -> flow - alias -> flow - quote-start -> quoted-scalar -> flow - comma -> flow - [else] -> plain-scalar(true, 0) -> flow - -quoted-scalar - quote-end -> . - [else] -> quoted-scalar - -block-scalar(min) - newline + peek(indent < min) -> . - [else] -> block-scalar(min) - -plain-scalar(is-flow, min) - scalar-end(is-flow) -> . - peek(newline + (indent < min)) -> . - [else] -> plain-scalar(min) -*/ -function isEmpty(ch) { - switch (ch) { - case undefined: - case ' ': - case '\n': - case '\r': - case '\t': - return true; - default: - return false; - } -} -const hexDigits = '0123456789ABCDEFabcdef'.split(''); -const tagChars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()".split(''); -const invalidFlowScalarChars = ',[]{}'.split(''); -const invalidAnchorChars = ' ,[]{}\n\r\t'.split(''); -const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch); -/** - * Splits an input string into lexical tokens, i.e. smaller strings that are - * easily identifiable by `tokens.tokenType()`. - * - * Lexing starts always in a "stream" context. Incomplete input may be buffered - * until a complete token can be emitted. 
- * - * In addition to slices of the original input, the following control characters - * may also be emitted: - * - * - `\x02` (Start of Text): A document starts with the next token - * - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error) - * - `\x1f` (Unit Separator): Next token is a scalar value - * - `\u{FEFF}` (Byte order mark): Emitted separately outside documents - */ -class Lexer { - constructor() { - /** - * Flag indicating whether the end of the current buffer marks the end of - * all input - */ - this.atEnd = false; - /** - * Explicit indent set in block scalar header, as an offset from the current - * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not - * explicitly set. - */ - this.blockScalarIndent = -1; - /** - * Block scalars that include a + (keep) chomping indicator in their header - * include trailing empty lines, which are otherwise excluded from the - * scalar's contents. - */ - this.blockScalarKeep = false; - /** Current input */ - this.buffer = ''; - /** - * Flag noting whether the map value indicator : can immediately follow this - * node within a flow context. - */ - this.flowKey = false; - /** Count of surrounding flow collection levels. */ - this.flowLevel = 0; - /** - * Minimum level of indentation required for next lines to be parsed as a - * part of the current scalar value. - */ - this.indentNext = 0; - /** Indentation level of the current line. */ - this.indentValue = 0; - /** Position of the next \n character. */ - this.lineEndPos = null; - /** Stores the state of the lexer if reaching the end of incpomplete input */ - this.next = null; - /** A pointer to `buffer`; the current position of the lexer. */ - this.pos = 0; - } - /** - * Generate YAML tokens from the `source` string. If `incomplete`, - * a part of the last line may be left as a buffer for the next call. - * - * @returns A generator of lexical tokens - */ - *lex(source, incomplete = false) { - if (source) { - if (typeof source !== 'string') - throw TypeError('source is not a string'); - this.buffer = this.buffer ? this.buffer + source : source; - this.lineEndPos = null; - } - this.atEnd = !incomplete; - let next = this.next ?? 'stream'; - while (next && (incomplete || this.hasChars(1))) - next = yield* this.parseNext(next); - } - atLineEnd() { - let i = this.pos; - let ch = this.buffer[i]; - while (ch === ' ' || ch === '\t') - ch = this.buffer[++i]; - if (!ch || ch === '#' || ch === '\n') - return true; - if (ch === '\r') - return this.buffer[i + 1] === '\n'; - return false; - } - charAt(n) { - return this.buffer[this.pos + n]; - } - continueScalar(offset) { - let ch = this.buffer[offset]; - if (this.indentNext > 0) { - let indent = 0; - while (ch === ' ') - ch = this.buffer[++indent + offset]; - if (ch === '\r') { - const next = this.buffer[indent + offset + 1]; - if (next === '\n' || (!next && !this.atEnd)) - return offset + indent + 1; - } - return ch === '\n' || indent >= this.indentNext || (!ch && !this.atEnd) - ? offset + indent - : -1; - } - if (ch === '-' || ch === '.') { - const dt = this.buffer.substr(offset, 3); - if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3])) - return -1; - } - return offset; - } - getLine() { - let end = this.lineEndPos; - if (typeof end !== 'number' || (end !== -1 && end < this.pos)) { - end = this.buffer.indexOf('\n', this.pos); - this.lineEndPos = end; - } - if (end === -1) - return this.atEnd ? 
this.buffer.substring(this.pos) : null; - if (this.buffer[end - 1] === '\r') - end -= 1; - return this.buffer.substring(this.pos, end); - } - hasChars(n) { - return this.pos + n <= this.buffer.length; - } - setNext(state) { - this.buffer = this.buffer.substring(this.pos); - this.pos = 0; - this.lineEndPos = null; - this.next = state; - return null; - } - peek(n) { - return this.buffer.substr(this.pos, n); - } - *parseNext(next) { - switch (next) { - case 'stream': - return yield* this.parseStream(); - case 'line-start': - return yield* this.parseLineStart(); - case 'block-start': - return yield* this.parseBlockStart(); - case 'doc': - return yield* this.parseDocument(); - case 'flow': - return yield* this.parseFlowCollection(); - case 'quoted-scalar': - return yield* this.parseQuotedScalar(); - case 'block-scalar': - return yield* this.parseBlockScalar(); - case 'plain-scalar': - return yield* this.parsePlainScalar(); - } - } - *parseStream() { - let line = this.getLine(); - if (line === null) - return this.setNext('stream'); - if (line[0] === cst.BOM) { - yield* this.pushCount(1); - line = line.substring(1); - } - if (line[0] === '%') { - let dirEnd = line.length; - let cs = line.indexOf('#'); - while (cs !== -1) { - const ch = line[cs - 1]; - if (ch === ' ' || ch === '\t') { - dirEnd = cs - 1; - break; - } - else { - cs = line.indexOf('#', cs + 1); - } - } - while (true) { - const ch = line[dirEnd - 1]; - if (ch === ' ' || ch === '\t') - dirEnd -= 1; - else - break; - } - const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true)); - yield* this.pushCount(line.length - n); // possible comment - this.pushNewline(); - return 'stream'; - } - if (this.atLineEnd()) { - const sp = yield* this.pushSpaces(true); - yield* this.pushCount(line.length - sp); - yield* this.pushNewline(); - return 'stream'; - } - yield cst.DOCUMENT; - return yield* this.parseLineStart(); - } - *parseLineStart() { - const ch = this.charAt(0); - if (!ch && !this.atEnd) - return this.setNext('line-start'); - if (ch === '-' || ch === '.') { - if (!this.atEnd && !this.hasChars(4)) - return this.setNext('line-start'); - const s = this.peek(3); - if (s === '---' && isEmpty(this.charAt(3))) { - yield* this.pushCount(3); - this.indentValue = 0; - this.indentNext = 0; - return 'doc'; - } - else if (s === '...' && isEmpty(this.charAt(3))) { - yield* this.pushCount(3); - return 'stream'; - } - } - this.indentValue = yield* this.pushSpaces(false); - if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1))) - this.indentNext = this.indentValue; - return yield* this.parseBlockStart(); - } - *parseBlockStart() { - const [ch0, ch1] = this.peek(2); - if (!ch1 && !this.atEnd) - return this.setNext('block-start'); - if ((ch0 === '-' || ch0 === '?' 
|| ch0 === ':') && isEmpty(ch1)) { - const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true)); - this.indentNext = this.indentValue + 1; - this.indentValue += n; - return yield* this.parseBlockStart(); - } - return 'doc'; - } - *parseDocument() { - yield* this.pushSpaces(true); - const line = this.getLine(); - if (line === null) - return this.setNext('doc'); - let n = yield* this.pushIndicators(); - switch (line[n]) { - case '#': - yield* this.pushCount(line.length - n); - // fallthrough - case undefined: - yield* this.pushNewline(); - return yield* this.parseLineStart(); - case '{': - case '[': - yield* this.pushCount(1); - this.flowKey = false; - this.flowLevel = 1; - return 'flow'; - case '}': - case ']': - // this is an error - yield* this.pushCount(1); - return 'doc'; - case '*': - yield* this.pushUntil(isNotAnchorChar); - return 'doc'; - case '"': - case "'": - return yield* this.parseQuotedScalar(); - case '|': - case '>': - n += yield* this.parseBlockScalarHeader(); - n += yield* this.pushSpaces(true); - yield* this.pushCount(line.length - n); - yield* this.pushNewline(); - return yield* this.parseBlockScalar(); - default: - return yield* this.parsePlainScalar(); - } - } - *parseFlowCollection() { - let nl, sp; - let indent = -1; - do { - nl = yield* this.pushNewline(); - if (nl > 0) { - sp = yield* this.pushSpaces(false); - this.indentValue = indent = sp; - } - else { - sp = 0; - } - sp += yield* this.pushSpaces(true); - } while (nl + sp > 0); - const line = this.getLine(); - if (line === null) - return this.setNext('flow'); - if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') || - (indent === 0 && - (line.startsWith('---') || line.startsWith('...')) && - isEmpty(line[3]))) { - // Allowing for the terminal ] or } at the same (rather than greater) - // indent level as the initial [ or { is technically invalid, but - // failing here would be surprising to users. - const atFlowEndMarker = indent === this.indentNext - 1 && - this.flowLevel === 1 && - (line[0] === ']' || line[0] === '}'); - if (!atFlowEndMarker) { - // this is an error - this.flowLevel = 0; - yield cst.FLOW_END; - return yield* this.parseLineStart(); - } - } - let n = 0; - while (line[n] === ',') { - n += yield* this.pushCount(1); - n += yield* this.pushSpaces(true); - this.flowKey = false; - } - n += yield* this.pushIndicators(); - switch (line[n]) { - case undefined: - return 'flow'; - case '#': - yield* this.pushCount(line.length - n); - return 'flow'; - case '{': - case '[': - yield* this.pushCount(1); - this.flowKey = false; - this.flowLevel += 1; - return 'flow'; - case '}': - case ']': - yield* this.pushCount(1); - this.flowKey = true; - this.flowLevel -= 1; - return this.flowLevel ? 
'flow' : 'doc'; - case '*': - yield* this.pushUntil(isNotAnchorChar); - return 'flow'; - case '"': - case "'": - this.flowKey = true; - return yield* this.parseQuotedScalar(); - case ':': { - const next = this.charAt(1); - if (this.flowKey || isEmpty(next) || next === ',') { - this.flowKey = false; - yield* this.pushCount(1); - yield* this.pushSpaces(true); - return 'flow'; - } - } - // fallthrough - default: - this.flowKey = false; - return yield* this.parsePlainScalar(); - } - } - *parseQuotedScalar() { - const quote = this.charAt(0); - let end = this.buffer.indexOf(quote, this.pos + 1); - if (quote === "'") { - while (end !== -1 && this.buffer[end + 1] === "'") - end = this.buffer.indexOf("'", end + 2); - } - else { - // double-quote - while (end !== -1) { - let n = 0; - while (this.buffer[end - 1 - n] === '\\') - n += 1; - if (n % 2 === 0) - break; - end = this.buffer.indexOf('"', end + 1); - } - } - // Only looking for newlines within the quotes - const qb = this.buffer.substring(0, end); - let nl = qb.indexOf('\n', this.pos); - if (nl !== -1) { - while (nl !== -1) { - const cs = this.continueScalar(nl + 1); - if (cs === -1) - break; - nl = qb.indexOf('\n', cs); - } - if (nl !== -1) { - // this is an error caused by an unexpected unindent - end = nl - (qb[nl - 1] === '\r' ? 2 : 1); - } - } - if (end === -1) { - if (!this.atEnd) - return this.setNext('quoted-scalar'); - end = this.buffer.length; - } - yield* this.pushToIndex(end + 1, false); - return this.flowLevel ? 'flow' : 'doc'; - } - *parseBlockScalarHeader() { - this.blockScalarIndent = -1; - this.blockScalarKeep = false; - let i = this.pos; - while (true) { - const ch = this.buffer[++i]; - if (ch === '+') - this.blockScalarKeep = true; - else if (ch > '0' && ch <= '9') - this.blockScalarIndent = Number(ch) - 1; - else if (ch !== '-') - break; - } - return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#'); - } - *parseBlockScalar() { - let nl = this.pos - 1; // may be -1 if this.pos === 0 - let indent = 0; - let ch; - loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) { - switch (ch) { - case ' ': - indent += 1; - break; - case '\n': - nl = i; - indent = 0; - break; - case '\r': { - const next = this.buffer[i + 1]; - if (!next && !this.atEnd) - return this.setNext('block-scalar'); - if (next === '\n') - break; - } // fallthrough - default: - break loop; - } - } - if (!ch && !this.atEnd) - return this.setNext('block-scalar'); - if (indent >= this.indentNext) { - if (this.blockScalarIndent === -1) - this.indentNext = indent; - else - this.indentNext += this.blockScalarIndent; - do { - const cs = this.continueScalar(nl + 1); - if (cs === -1) - break; - nl = this.buffer.indexOf('\n', cs); - } while (nl !== -1); - if (nl === -1) { - if (!this.atEnd) - return this.setNext('block-scalar'); - nl = this.buffer.length; - } - } - if (!this.blockScalarKeep) { - do { - let i = nl - 1; - let ch = this.buffer[i]; - if (ch === '\r') - ch = this.buffer[--i]; - const lastChar = i; // Drop the line if last char not more indented - while (ch === ' ' || ch === '\t') - ch = this.buffer[--i]; - if (ch === '\n' && i >= this.pos && i + 1 + indent > lastChar) - nl = i; - else - break; - } while (true); - } - yield cst.SCALAR; - yield* this.pushToIndex(nl + 1, true); - return yield* this.parseLineStart(); - } - *parsePlainScalar() { - const inFlow = this.flowLevel > 0; - let end = this.pos - 1; - let i = this.pos - 1; - let ch; - while ((ch = this.buffer[++i])) { - if (ch === ':') { - const next = this.buffer[i + 1]; - if (isEmpty(next) || 
(inFlow && next === ',')) - break; - end = i; - } - else if (isEmpty(ch)) { - let next = this.buffer[i + 1]; - if (ch === '\r') { - if (next === '\n') { - i += 1; - ch = '\n'; - next = this.buffer[i + 1]; - } - else - end = i; - } - if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next))) - break; - if (ch === '\n') { - const cs = this.continueScalar(i + 1); - if (cs === -1) - break; - i = Math.max(i, cs - 2); // to advance, but still account for ' #' - } - } - else { - if (inFlow && invalidFlowScalarChars.includes(ch)) - break; - end = i; - } - } - if (!ch && !this.atEnd) - return this.setNext('plain-scalar'); - yield cst.SCALAR; - yield* this.pushToIndex(end + 1, true); - return inFlow ? 'flow' : 'doc'; - } - *pushCount(n) { - if (n > 0) { - yield this.buffer.substr(this.pos, n); - this.pos += n; - return n; - } - return 0; - } - *pushToIndex(i, allowEmpty) { - const s = this.buffer.slice(this.pos, i); - if (s) { - yield s; - this.pos += s.length; - return s.length; - } - else if (allowEmpty) - yield ''; - return 0; - } - *pushIndicators() { - switch (this.charAt(0)) { - case '!': - return ((yield* this.pushTag()) + - (yield* this.pushSpaces(true)) + - (yield* this.pushIndicators())); - case '&': - return ((yield* this.pushUntil(isNotAnchorChar)) + - (yield* this.pushSpaces(true)) + - (yield* this.pushIndicators())); - case '-': // this is an error - case '?': // this is an error outside flow collections - case ':': { - const inFlow = this.flowLevel > 0; - const ch1 = this.charAt(1); - if (isEmpty(ch1) || (inFlow && invalidFlowScalarChars.includes(ch1))) { - if (!inFlow) - this.indentNext = this.indentValue + 1; - else if (this.flowKey) - this.flowKey = false; - return ((yield* this.pushCount(1)) + - (yield* this.pushSpaces(true)) + - (yield* this.pushIndicators())); - } - } - } - return 0; - } - *pushTag() { - if (this.charAt(1) === '<') { - let i = this.pos + 2; - let ch = this.buffer[i]; - while (!isEmpty(ch) && ch !== '>') - ch = this.buffer[++i]; - return yield* this.pushToIndex(ch === '>' ? i + 1 : i, false); - } - else { - let i = this.pos + 1; - let ch = this.buffer[i]; - while (ch) { - if (tagChars.includes(ch)) - ch = this.buffer[++i]; - else if (ch === '%' && - hexDigits.includes(this.buffer[i + 1]) && - hexDigits.includes(this.buffer[i + 2])) { - ch = this.buffer[(i += 3)]; - } - else - break; - } - return yield* this.pushToIndex(i, false); - } - } - *pushNewline() { - const ch = this.buffer[this.pos]; - if (ch === '\n') - return yield* this.pushCount(1); - else if (ch === '\r' && this.charAt(1) === '\n') - return yield* this.pushCount(2); - else - return 0; - } - *pushSpaces(allowTabs) { - let i = this.pos - 1; - let ch; - do { - ch = this.buffer[++i]; - } while (ch === ' ' || (allowTabs && ch === '\t')); - const n = i - this.pos; - if (n > 0) { - yield this.buffer.substr(this.pos, n); - this.pos = i; - } - return n; - } - *pushUntil(test) { - let i = this.pos; - let ch = this.buffer[i]; - while (!test(ch)) - ch = this.buffer[++i]; - return yield* this.pushToIndex(i, false); - } -} - -exports.Lexer = Lexer; diff --git a/bin/node_modules/yaml/dist/parse/line-counter.d.ts b/bin/node_modules/yaml/dist/parse/line-counter.d.ts deleted file mode 100644 index b469095..0000000 --- a/bin/node_modules/yaml/dist/parse/line-counter.d.ts +++ /dev/null @@ -1,22 +0,0 @@ -/** - * Tracks newlines during parsing in order to provide an efficient API for - * determining the one-indexed `{ line, col }` position for any offset - * within the input. 
- */ -export declare class LineCounter { - lineStarts: number[]; - /** - * Should be called in ascending order. Otherwise, call - * `lineCounter.lineStarts.sort()` before calling `linePos()`. - */ - addNewLine: (offset: number) => number; - /** - * Performs a binary search and returns the 1-indexed { line, col } - * position of `offset`. If `line === 0`, `addNewLine` has never been - * called or `offset` is before the first known newline. - */ - linePos: (offset: number) => { - line: number; - col: number; - }; -} diff --git a/bin/node_modules/yaml/dist/parse/line-counter.js b/bin/node_modules/yaml/dist/parse/line-counter.js deleted file mode 100644 index 0e7383b..0000000 --- a/bin/node_modules/yaml/dist/parse/line-counter.js +++ /dev/null @@ -1,41 +0,0 @@ -'use strict'; - -/** - * Tracks newlines during parsing in order to provide an efficient API for - * determining the one-indexed `{ line, col }` position for any offset - * within the input. - */ -class LineCounter { - constructor() { - this.lineStarts = []; - /** - * Should be called in ascending order. Otherwise, call - * `lineCounter.lineStarts.sort()` before calling `linePos()`. - */ - this.addNewLine = (offset) => this.lineStarts.push(offset); - /** - * Performs a binary search and returns the 1-indexed { line, col } - * position of `offset`. If `line === 0`, `addNewLine` has never been - * called or `offset` is before the first known newline. - */ - this.linePos = (offset) => { - let low = 0; - let high = this.lineStarts.length; - while (low < high) { - const mid = (low + high) >> 1; // Math.floor((low + high) / 2) - if (this.lineStarts[mid] < offset) - low = mid + 1; - else - high = mid; - } - if (this.lineStarts[low] === offset) - return { line: low + 1, col: 1 }; - if (low === 0) - return { line: 0, col: offset }; - const start = this.lineStarts[low - 1]; - return { line: low, col: offset - start + 1 }; - }; - } -} - -exports.LineCounter = LineCounter; diff --git a/bin/node_modules/yaml/dist/parse/parser.d.ts b/bin/node_modules/yaml/dist/parse/parser.d.ts deleted file mode 100644 index 8f3159f..0000000 --- a/bin/node_modules/yaml/dist/parse/parser.d.ts +++ /dev/null @@ -1,84 +0,0 @@ -import { Token } from './cst.js'; -/** - * A YAML concrete syntax tree (CST) parser - * - * ```ts - * const src: string = ... - * for (const token of new Parser().parse(src)) { - * // token: Token - * } - * ``` - * - * To use the parser with a user-provided lexer: - * - * ```ts - * function* parse(source: string, lexer: Lexer) { - * const parser = new Parser() - * for (const lexeme of lexer.lex(source)) - * yield* parser.next(lexeme) - * yield* parser.end() - * } - * - * const src: string = ... 
- * const lexer = new Lexer() - * for (const token of parse(src, lexer)) { - * // token: Token - * } - * ``` - */ -export declare class Parser { - private onNewLine?; - /** If true, space and sequence indicators count as indentation */ - private atNewLine; - /** If true, next token is a scalar value */ - private atScalar; - /** Current indentation level */ - private indent; - /** Current offset since the start of parsing */ - offset: number; - /** On the same line with a block map key */ - private onKeyLine; - /** Top indicates the node that's currently being built */ - stack: Token[]; - /** The source of the current token, set in parse() */ - private source; - /** The type of the current token, set in parse() */ - private type; - /** - * @param onNewLine - If defined, called separately with the start position of - * each new line (in `parse()`, including the start of input). - */ - constructor(onNewLine?: (offset: number) => void); - /** - * Parse `source` as a YAML stream. - * If `incomplete`, a part of the last line may be left as a buffer for the next call. - * - * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens. - * - * @returns A generator of tokens representing each directive, document, and other structure. - */ - parse(source: string, incomplete?: boolean): Generator; - /** - * Advance the parser by the `source` of one lexical token. - */ - next(source: string): Generator; - private lexer; - /** Call at end of input to push out any remaining constructions */ - end(): Generator; - private get sourceToken(); - private step; - private peek; - private pop; - private stream; - private document; - private scalar; - private blockScalar; - private blockMap; - private blockSequence; - private flowCollection; - private flowScalar; - private startBlockValue; - private atIndentedComment; - private documentEnd; - private lineEnd; -} diff --git a/bin/node_modules/yaml/dist/parse/parser.js b/bin/node_modules/yaml/dist/parse/parser.js deleted file mode 100644 index 7a1493d..0000000 --- a/bin/node_modules/yaml/dist/parse/parser.js +++ /dev/null @@ -1,957 +0,0 @@ -'use strict'; - -var cst = require('./cst.js'); -var lexer = require('./lexer.js'); - -function includesToken(list, type) { - for (let i = 0; i < list.length; ++i) - if (list[i].type === type) - return true; - return false; -} -function findNonEmptyIndex(list) { - for (let i = 0; i < list.length; ++i) { - switch (list[i].type) { - case 'space': - case 'comment': - case 'newline': - break; - default: - return i; - } - } - return -1; -} -function isFlowToken(token) { - switch (token?.type) { - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - case 'flow-collection': - return true; - default: - return false; - } -} -function getPrevProps(parent) { - switch (parent.type) { - case 'document': - return parent.start; - case 'block-map': { - const it = parent.items[parent.items.length - 1]; - return it.sep ?? 
it.start; - } - case 'block-seq': - return parent.items[parent.items.length - 1].start; - /* istanbul ignore next should not happen */ - default: - return []; - } -} -/** Note: May modify input array */ -function getFirstKeyStartProps(prev) { - if (prev.length === 0) - return []; - let i = prev.length; - loop: while (--i >= 0) { - switch (prev[i].type) { - case 'doc-start': - case 'explicit-key-ind': - case 'map-value-ind': - case 'seq-item-ind': - case 'newline': - break loop; - } - } - while (prev[++i]?.type === 'space') { - /* loop */ - } - return prev.splice(i, prev.length); -} -function fixFlowSeqItems(fc) { - if (fc.start.type === 'flow-seq-start') { - for (const it of fc.items) { - if (it.sep && - !it.value && - !includesToken(it.start, 'explicit-key-ind') && - !includesToken(it.sep, 'map-value-ind')) { - if (it.key) - it.value = it.key; - delete it.key; - if (isFlowToken(it.value)) { - if (it.value.end) - Array.prototype.push.apply(it.value.end, it.sep); - else - it.value.end = it.sep; - } - else - Array.prototype.push.apply(it.start, it.sep); - delete it.sep; - } - } - } -} -/** - * A YAML concrete syntax tree (CST) parser - * - * ```ts - * const src: string = ... - * for (const token of new Parser().parse(src)) { - * // token: Token - * } - * ``` - * - * To use the parser with a user-provided lexer: - * - * ```ts - * function* parse(source: string, lexer: Lexer) { - * const parser = new Parser() - * for (const lexeme of lexer.lex(source)) - * yield* parser.next(lexeme) - * yield* parser.end() - * } - * - * const src: string = ... - * const lexer = new Lexer() - * for (const token of parse(src, lexer)) { - * // token: Token - * } - * ``` - */ -class Parser { - /** - * @param onNewLine - If defined, called separately with the start position of - * each new line (in `parse()`, including the start of input). - */ - constructor(onNewLine) { - /** If true, space and sequence indicators count as indentation */ - this.atNewLine = true; - /** If true, next token is a scalar value */ - this.atScalar = false; - /** Current indentation level */ - this.indent = 0; - /** Current offset since the start of parsing */ - this.offset = 0; - /** On the same line with a block map key */ - this.onKeyLine = false; - /** Top indicates the node that's currently being built */ - this.stack = []; - /** The source of the current token, set in parse() */ - this.source = ''; - /** The type of the current token, set in parse() */ - this.type = ''; - // Must be defined after `next()` - this.lexer = new lexer.Lexer(); - this.onNewLine = onNewLine; - } - /** - * Parse `source` as a YAML stream. - * If `incomplete`, a part of the last line may be left as a buffer for the next call. - * - * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens. - * - * @returns A generator of tokens representing each directive, document, and other structure. - */ - *parse(source, incomplete = false) { - if (this.onNewLine && this.offset === 0) - this.onNewLine(0); - for (const lexeme of this.lexer.lex(source, incomplete)) - yield* this.next(lexeme); - if (!incomplete) - yield* this.end(); - } - /** - * Advance the parser by the `source` of one lexical token. 
- */ - *next(source) { - this.source = source; - if (process.env.LOG_TOKENS) - console.log('|', cst.prettyToken(source)); - if (this.atScalar) { - this.atScalar = false; - yield* this.step(); - this.offset += source.length; - return; - } - const type = cst.tokenType(source); - if (!type) { - const message = `Not a YAML token: ${source}`; - yield* this.pop({ type: 'error', offset: this.offset, message, source }); - this.offset += source.length; - } - else if (type === 'scalar') { - this.atNewLine = false; - this.atScalar = true; - this.type = 'scalar'; - } - else { - this.type = type; - yield* this.step(); - switch (type) { - case 'newline': - this.atNewLine = true; - this.indent = 0; - if (this.onNewLine) - this.onNewLine(this.offset + source.length); - break; - case 'space': - if (this.atNewLine && source[0] === ' ') - this.indent += source.length; - break; - case 'explicit-key-ind': - case 'map-value-ind': - case 'seq-item-ind': - if (this.atNewLine) - this.indent += source.length; - break; - case 'doc-mode': - case 'flow-error-end': - return; - default: - this.atNewLine = false; - } - this.offset += source.length; - } - } - /** Call at end of input to push out any remaining constructions */ - *end() { - while (this.stack.length > 0) - yield* this.pop(); - } - get sourceToken() { - const st = { - type: this.type, - offset: this.offset, - indent: this.indent, - source: this.source - }; - return st; - } - *step() { - const top = this.peek(1); - if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) { - while (this.stack.length > 0) - yield* this.pop(); - this.stack.push({ - type: 'doc-end', - offset: this.offset, - source: this.source - }); - return; - } - if (!top) - return yield* this.stream(); - switch (top.type) { - case 'document': - return yield* this.document(top); - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - return yield* this.scalar(top); - case 'block-scalar': - return yield* this.blockScalar(top); - case 'block-map': - return yield* this.blockMap(top); - case 'block-seq': - return yield* this.blockSequence(top); - case 'flow-collection': - return yield* this.flowCollection(top); - case 'doc-end': - return yield* this.documentEnd(top); - } - /* istanbul ignore next should not happen */ - yield* this.pop(); - } - peek(n) { - return this.stack[this.stack.length - n]; - } - *pop(error) { - const token = error ?? this.stack.pop(); - /* istanbul ignore if should not happen */ - if (!token) { - const message = 'Tried to pop an empty stack'; - yield { type: 'error', offset: this.offset, source: '', message }; - } - else if (this.stack.length === 0) { - yield token; - } - else { - const top = this.peek(1); - if (token.type === 'block-scalar') { - // Block scalars use their parent rather than header indent - token.indent = 'indent' in top ? 
top.indent : 0; - } - else if (token.type === 'flow-collection' && top.type === 'document') { - // Ignore all indent for top-level flow collections - token.indent = 0; - } - if (token.type === 'flow-collection') - fixFlowSeqItems(token); - switch (top.type) { - case 'document': - top.value = token; - break; - case 'block-scalar': - top.props.push(token); // error - break; - case 'block-map': { - const it = top.items[top.items.length - 1]; - if (it.value) { - top.items.push({ start: [], key: token, sep: [] }); - this.onKeyLine = true; - return; - } - else if (it.sep) { - it.value = token; - } - else { - Object.assign(it, { key: token, sep: [] }); - this.onKeyLine = !includesToken(it.start, 'explicit-key-ind'); - return; - } - break; - } - case 'block-seq': { - const it = top.items[top.items.length - 1]; - if (it.value) - top.items.push({ start: [], value: token }); - else - it.value = token; - break; - } - case 'flow-collection': { - const it = top.items[top.items.length - 1]; - if (!it || it.value) - top.items.push({ start: [], key: token, sep: [] }); - else if (it.sep) - it.value = token; - else - Object.assign(it, { key: token, sep: [] }); - return; - } - /* istanbul ignore next should not happen */ - default: - yield* this.pop(); - yield* this.pop(token); - } - if ((top.type === 'document' || - top.type === 'block-map' || - top.type === 'block-seq') && - (token.type === 'block-map' || token.type === 'block-seq')) { - const last = token.items[token.items.length - 1]; - if (last && - !last.sep && - !last.value && - last.start.length > 0 && - findNonEmptyIndex(last.start) === -1 && - (token.indent === 0 || - last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) { - if (top.type === 'document') - top.end = last.start; - else - top.items.push({ start: last.start }); - token.items.splice(-1, 1); - } - } - } - } - *stream() { - switch (this.type) { - case 'directive-line': - yield { type: 'directive', offset: this.offset, source: this.source }; - return; - case 'byte-order-mark': - case 'space': - case 'comment': - case 'newline': - yield this.sourceToken; - return; - case 'doc-mode': - case 'doc-start': { - const doc = { - type: 'document', - offset: this.offset, - start: [] - }; - if (this.type === 'doc-start') - doc.start.push(this.sourceToken); - this.stack.push(doc); - return; - } - } - yield { - type: 'error', - offset: this.offset, - message: `Unexpected ${this.type} token in YAML stream`, - source: this.source - }; - } - *document(doc) { - if (doc.value) - return yield* this.lineEnd(doc); - switch (this.type) { - case 'doc-start': { - if (findNonEmptyIndex(doc.start) !== -1) { - yield* this.pop(); - yield* this.step(); - } - else - doc.start.push(this.sourceToken); - return; - } - case 'anchor': - case 'tag': - case 'space': - case 'comment': - case 'newline': - doc.start.push(this.sourceToken); - return; - } - const bv = this.startBlockValue(doc); - if (bv) - this.stack.push(bv); - else { - yield { - type: 'error', - offset: this.offset, - message: `Unexpected ${this.type} token in YAML document`, - source: this.source - }; - } - } - *scalar(scalar) { - if (this.type === 'map-value-ind') { - const prev = getPrevProps(this.peek(2)); - const start = getFirstKeyStartProps(prev); - let sep; - if (scalar.end) { - sep = scalar.end; - sep.push(this.sourceToken); - delete scalar.end; - } - else - sep = [this.sourceToken]; - const map = { - type: 'block-map', - offset: scalar.offset, - indent: scalar.indent, - items: [{ start, key: scalar, sep }] - }; - this.onKeyLine = 
true; - this.stack[this.stack.length - 1] = map; - } - else - yield* this.lineEnd(scalar); - } - *blockScalar(scalar) { - switch (this.type) { - case 'space': - case 'comment': - case 'newline': - scalar.props.push(this.sourceToken); - return; - case 'scalar': - scalar.source = this.source; - // block-scalar source includes trailing newline - this.atNewLine = true; - this.indent = 0; - if (this.onNewLine) { - let nl = this.source.indexOf('\n') + 1; - while (nl !== 0) { - this.onNewLine(this.offset + nl); - nl = this.source.indexOf('\n', nl) + 1; - } - } - yield* this.pop(); - break; - /* istanbul ignore next should not happen */ - default: - yield* this.pop(); - yield* this.step(); - } - } - *blockMap(map) { - const it = map.items[map.items.length - 1]; - // it.sep is true-ish if pair already has key or : separator - switch (this.type) { - case 'newline': - this.onKeyLine = false; - if (it.value) { - const end = 'end' in it.value ? it.value.end : undefined; - const last = Array.isArray(end) ? end[end.length - 1] : undefined; - if (last?.type === 'comment') - end?.push(this.sourceToken); - else - map.items.push({ start: [this.sourceToken] }); - } - else if (it.sep) { - it.sep.push(this.sourceToken); - } - else { - it.start.push(this.sourceToken); - } - return; - case 'space': - case 'comment': - if (it.value) { - map.items.push({ start: [this.sourceToken] }); - } - else if (it.sep) { - it.sep.push(this.sourceToken); - } - else { - if (this.atIndentedComment(it.start, map.indent)) { - const prev = map.items[map.items.length - 2]; - const end = prev?.value?.end; - if (Array.isArray(end)) { - Array.prototype.push.apply(end, it.start); - end.push(this.sourceToken); - map.items.pop(); - return; - } - } - it.start.push(this.sourceToken); - } - return; - } - if (this.indent >= map.indent) { - const atNextItem = !this.onKeyLine && - this.indent === map.indent && - it.sep && - this.type !== 'seq-item-ind'; - // For empty nodes, assign newline-separated not indented empty tokens to following node - let start = []; - if (atNextItem && it.sep && !it.value) { - const nl = []; - for (let i = 0; i < it.sep.length; ++i) { - const st = it.sep[i]; - switch (st.type) { - case 'newline': - nl.push(i); - break; - case 'space': - break; - case 'comment': - if (st.indent > map.indent) - nl.length = 0; - break; - default: - nl.length = 0; - } - } - if (nl.length >= 2) - start = it.sep.splice(nl[1]); - } - switch (this.type) { - case 'anchor': - case 'tag': - if (atNextItem || it.value) { - start.push(this.sourceToken); - map.items.push({ start }); - this.onKeyLine = true; - } - else if (it.sep) { - it.sep.push(this.sourceToken); - } - else { - it.start.push(this.sourceToken); - } - return; - case 'explicit-key-ind': - if (!it.sep && !includesToken(it.start, 'explicit-key-ind')) { - it.start.push(this.sourceToken); - } - else if (atNextItem || it.value) { - start.push(this.sourceToken); - map.items.push({ start }); - } - else { - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start: [this.sourceToken] }] - }); - } - this.onKeyLine = true; - return; - case 'map-value-ind': - if (includesToken(it.start, 'explicit-key-ind')) { - if (!it.sep) { - if (includesToken(it.start, 'newline')) { - Object.assign(it, { key: null, sep: [this.sourceToken] }); - } - else { - const start = getFirstKeyStartProps(it.start); - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key: null, sep: [this.sourceToken] }] - }); - } - } - else 
if (it.value) { - map.items.push({ start: [], key: null, sep: [this.sourceToken] }); - } - else if (includesToken(it.sep, 'map-value-ind')) { - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key: null, sep: [this.sourceToken] }] - }); - } - else if (isFlowToken(it.key) && - !includesToken(it.sep, 'newline')) { - const start = getFirstKeyStartProps(it.start); - const key = it.key; - const sep = it.sep; - sep.push(this.sourceToken); - // @ts-expect-error type guard is wrong here - delete it.key, delete it.sep; - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key, sep }] - }); - } - else if (start.length > 0) { - // Not actually at next item - it.sep = it.sep.concat(start, this.sourceToken); - } - else { - it.sep.push(this.sourceToken); - } - } - else { - if (!it.sep) { - Object.assign(it, { key: null, sep: [this.sourceToken] }); - } - else if (it.value || atNextItem) { - map.items.push({ start, key: null, sep: [this.sourceToken] }); - } - else if (includesToken(it.sep, 'map-value-ind')) { - this.stack.push({ - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start: [], key: null, sep: [this.sourceToken] }] - }); - } - else { - it.sep.push(this.sourceToken); - } - } - this.onKeyLine = true; - return; - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': { - const fs = this.flowScalar(this.type); - if (atNextItem || it.value) { - map.items.push({ start, key: fs, sep: [] }); - this.onKeyLine = true; - } - else if (it.sep) { - this.stack.push(fs); - } - else { - Object.assign(it, { key: fs, sep: [] }); - this.onKeyLine = true; - } - return; - } - default: { - const bv = this.startBlockValue(map); - if (bv) { - if (atNextItem && - bv.type !== 'block-seq' && - includesToken(it.start, 'explicit-key-ind')) { - map.items.push({ start }); - } - this.stack.push(bv); - return; - } - } - } - } - yield* this.pop(); - yield* this.step(); - } - *blockSequence(seq) { - const it = seq.items[seq.items.length - 1]; - switch (this.type) { - case 'newline': - if (it.value) { - const end = 'end' in it.value ? it.value.end : undefined; - const last = Array.isArray(end) ? 
end[end.length - 1] : undefined; - if (last?.type === 'comment') - end?.push(this.sourceToken); - else - seq.items.push({ start: [this.sourceToken] }); - } - else - it.start.push(this.sourceToken); - return; - case 'space': - case 'comment': - if (it.value) - seq.items.push({ start: [this.sourceToken] }); - else { - if (this.atIndentedComment(it.start, seq.indent)) { - const prev = seq.items[seq.items.length - 2]; - const end = prev?.value?.end; - if (Array.isArray(end)) { - Array.prototype.push.apply(end, it.start); - end.push(this.sourceToken); - seq.items.pop(); - return; - } - } - it.start.push(this.sourceToken); - } - return; - case 'anchor': - case 'tag': - if (it.value || this.indent <= seq.indent) - break; - it.start.push(this.sourceToken); - return; - case 'seq-item-ind': - if (this.indent !== seq.indent) - break; - if (it.value || includesToken(it.start, 'seq-item-ind')) - seq.items.push({ start: [this.sourceToken] }); - else - it.start.push(this.sourceToken); - return; - } - if (this.indent > seq.indent) { - const bv = this.startBlockValue(seq); - if (bv) { - this.stack.push(bv); - return; - } - } - yield* this.pop(); - yield* this.step(); - } - *flowCollection(fc) { - const it = fc.items[fc.items.length - 1]; - if (this.type === 'flow-error-end') { - let top; - do { - yield* this.pop(); - top = this.peek(1); - } while (top && top.type === 'flow-collection'); - } - else if (fc.end.length === 0) { - switch (this.type) { - case 'comma': - case 'explicit-key-ind': - if (!it || it.sep) - fc.items.push({ start: [this.sourceToken] }); - else - it.start.push(this.sourceToken); - return; - case 'map-value-ind': - if (!it || it.value) - fc.items.push({ start: [], key: null, sep: [this.sourceToken] }); - else if (it.sep) - it.sep.push(this.sourceToken); - else - Object.assign(it, { key: null, sep: [this.sourceToken] }); - return; - case 'space': - case 'comment': - case 'newline': - case 'anchor': - case 'tag': - if (!it || it.value) - fc.items.push({ start: [this.sourceToken] }); - else if (it.sep) - it.sep.push(this.sourceToken); - else - it.start.push(this.sourceToken); - return; - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': { - const fs = this.flowScalar(this.type); - if (!it || it.value) - fc.items.push({ start: [], key: fs, sep: [] }); - else if (it.sep) - this.stack.push(fs); - else - Object.assign(it, { key: fs, sep: [] }); - return; - } - case 'flow-map-end': - case 'flow-seq-end': - fc.end.push(this.sourceToken); - return; - } - const bv = this.startBlockValue(fc); - /* istanbul ignore else should not happen */ - if (bv) - this.stack.push(bv); - else { - yield* this.pop(); - yield* this.step(); - } - } - else { - const parent = this.peek(2); - if (parent.type === 'block-map' && - ((this.type === 'map-value-ind' && parent.indent === fc.indent) || - (this.type === 'newline' && - !parent.items[parent.items.length - 1].sep))) { - yield* this.pop(); - yield* this.step(); - } - else if (this.type === 'map-value-ind' && - parent.type !== 'flow-collection') { - const prev = getPrevProps(parent); - const start = getFirstKeyStartProps(prev); - fixFlowSeqItems(fc); - const sep = fc.end.splice(1, fc.end.length); - sep.push(this.sourceToken); - const map = { - type: 'block-map', - offset: fc.offset, - indent: fc.indent, - items: [{ start, key: fc, sep }] - }; - this.onKeyLine = true; - this.stack[this.stack.length - 1] = map; - } - else { - yield* this.lineEnd(fc); - } - } - } - flowScalar(type) { - if (this.onNewLine) { - let nl = 
this.source.indexOf('\n') + 1; - while (nl !== 0) { - this.onNewLine(this.offset + nl); - nl = this.source.indexOf('\n', nl) + 1; - } - } - return { - type, - offset: this.offset, - indent: this.indent, - source: this.source - }; - } - startBlockValue(parent) { - switch (this.type) { - case 'alias': - case 'scalar': - case 'single-quoted-scalar': - case 'double-quoted-scalar': - return this.flowScalar(this.type); - case 'block-scalar-header': - return { - type: 'block-scalar', - offset: this.offset, - indent: this.indent, - props: [this.sourceToken], - source: '' - }; - case 'flow-map-start': - case 'flow-seq-start': - return { - type: 'flow-collection', - offset: this.offset, - indent: this.indent, - start: this.sourceToken, - items: [], - end: [] - }; - case 'seq-item-ind': - return { - type: 'block-seq', - offset: this.offset, - indent: this.indent, - items: [{ start: [this.sourceToken] }] - }; - case 'explicit-key-ind': { - this.onKeyLine = true; - const prev = getPrevProps(parent); - const start = getFirstKeyStartProps(prev); - start.push(this.sourceToken); - return { - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start }] - }; - } - case 'map-value-ind': { - this.onKeyLine = true; - const prev = getPrevProps(parent); - const start = getFirstKeyStartProps(prev); - return { - type: 'block-map', - offset: this.offset, - indent: this.indent, - items: [{ start, key: null, sep: [this.sourceToken] }] - }; - } - } - return null; - } - atIndentedComment(start, indent) { - if (this.type !== 'comment') - return false; - if (this.indent <= indent) - return false; - return start.every(st => st.type === 'newline' || st.type === 'space'); - } - *documentEnd(docEnd) { - if (this.type !== 'doc-mode') { - if (docEnd.end) - docEnd.end.push(this.sourceToken); - else - docEnd.end = [this.sourceToken]; - if (this.type === 'newline') - yield* this.pop(); - } - } - *lineEnd(token) { - switch (this.type) { - case 'comma': - case 'doc-start': - case 'doc-end': - case 'flow-seq-end': - case 'flow-map-end': - case 'map-value-ind': - yield* this.pop(); - yield* this.step(); - break; - case 'newline': - this.onKeyLine = false; - // fallthrough - case 'space': - case 'comment': - default: - // all other values are errors - if (token.end) - token.end.push(this.sourceToken); - else - token.end = [this.sourceToken]; - if (this.type === 'newline') - yield* this.pop(); - } - } -} - -exports.Parser = Parser; diff --git a/bin/node_modules/yaml/dist/public-api.d.ts b/bin/node_modules/yaml/dist/public-api.d.ts deleted file mode 100644 index 2b771ca..0000000 --- a/bin/node_modules/yaml/dist/public-api.d.ts +++ /dev/null @@ -1,43 +0,0 @@ -import { Composer } from './compose/composer.js'; -import type { Reviver } from './doc/applyReviver.js'; -import { Document, Replacer } from './doc/Document.js'; -import type { Node, ParsedNode } from './nodes/Node.js'; -import type { CreateNodeOptions, DocumentOptions, ParseOptions, SchemaOptions, ToJSOptions, ToStringOptions } from './options.js'; -export interface EmptyStream extends Array, ReturnType { - empty: true; -} -/** - * Parse the input as a stream of YAML documents. - * - * Documents should be separated from each other by `...` or `---` marker lines. - * - * @returns If an empty `docs` array is returned, it will be of type - * EmptyStream and contain additional stream information. In - * TypeScript, you should use `'empty' in docs` as a type guard for it. 
- */ -export declare function parseAllDocuments(source: string, options?: ParseOptions & DocumentOptions & SchemaOptions): Array : Document> | EmptyStream; -/** Parse an input string into a single YAML.Document */ -export declare function parseDocument(source: string, options?: ParseOptions & DocumentOptions & SchemaOptions): Contents extends ParsedNode ? Document.Parsed : Document; -/** - * Parse an input string into JavaScript. - * - * Only supports input consisting of a single YAML document; for multi-document - * support you should use `YAML.parseAllDocuments`. May throw on error, and may - * log warnings using `console.warn`. - * - * @param str - A string with YAML formatting. - * @param reviver - A reviver function, as in `JSON.parse()` - * @returns The value will match the type of the root value of the parsed YAML - * document, so Maps become objects, Sequences arrays, and scalars result in - * nulls, booleans, numbers and strings. - */ -export declare function parse(src: string, options?: ParseOptions & DocumentOptions & SchemaOptions & ToJSOptions): any; -export declare function parse(src: string, reviver: Reviver, options?: ParseOptions & DocumentOptions & SchemaOptions & ToJSOptions): any; -/** - * Stringify a value as a YAML document. - * - * @param replacer - A replacer array or function, as in `JSON.stringify()` - * @returns Will always include `\n` as the last character, as is expected of YAML documents. - */ -export declare function stringify(value: any, options?: DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions & ToStringOptions): string; -export declare function stringify(value: any, replacer?: Replacer | null, options?: string | number | (DocumentOptions & SchemaOptions & ParseOptions & CreateNodeOptions & ToStringOptions)): string; diff --git a/bin/node_modules/yaml/dist/public-api.js b/bin/node_modules/yaml/dist/public-api.js deleted file mode 100644 index 38aad92..0000000 --- a/bin/node_modules/yaml/dist/public-api.js +++ /dev/null @@ -1,104 +0,0 @@ -'use strict'; - -var composer = require('./compose/composer.js'); -var Document = require('./doc/Document.js'); -var errors = require('./errors.js'); -var log = require('./log.js'); -var lineCounter = require('./parse/line-counter.js'); -var parser = require('./parse/parser.js'); - -function parseOptions(options) { - const prettyErrors = options.prettyErrors !== false; - const lineCounter$1 = options.lineCounter || (prettyErrors && new lineCounter.LineCounter()) || null; - return { lineCounter: lineCounter$1, prettyErrors }; -} -/** - * Parse the input as a stream of YAML documents. - * - * Documents should be separated from each other by `...` or `---` marker lines. - * - * @returns If an empty `docs` array is returned, it will be of type - * EmptyStream and contain additional stream information. In - * TypeScript, you should use `'empty' in docs` as a type guard for it. 
- */ -function parseAllDocuments(source, options = {}) { - const { lineCounter, prettyErrors } = parseOptions(options); - const parser$1 = new parser.Parser(lineCounter?.addNewLine); - const composer$1 = new composer.Composer(options); - const docs = Array.from(composer$1.compose(parser$1.parse(source))); - if (prettyErrors && lineCounter) - for (const doc of docs) { - doc.errors.forEach(errors.prettifyError(source, lineCounter)); - doc.warnings.forEach(errors.prettifyError(source, lineCounter)); - } - if (docs.length > 0) - return docs; - return Object.assign([], { empty: true }, composer$1.streamInfo()); -} -/** Parse an input string into a single YAML.Document */ -function parseDocument(source, options = {}) { - const { lineCounter, prettyErrors } = parseOptions(options); - const parser$1 = new parser.Parser(lineCounter?.addNewLine); - const composer$1 = new composer.Composer(options); - // `doc` is always set by compose.end(true) at the very latest - let doc = null; - for (const _doc of composer$1.compose(parser$1.parse(source), true, source.length)) { - if (!doc) - doc = _doc; - else if (doc.options.logLevel !== 'silent') { - doc.errors.push(new errors.YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()')); - break; - } - } - if (prettyErrors && lineCounter) { - doc.errors.forEach(errors.prettifyError(source, lineCounter)); - doc.warnings.forEach(errors.prettifyError(source, lineCounter)); - } - return doc; -} -function parse(src, reviver, options) { - let _reviver = undefined; - if (typeof reviver === 'function') { - _reviver = reviver; - } - else if (options === undefined && reviver && typeof reviver === 'object') { - options = reviver; - } - const doc = parseDocument(src, options); - if (!doc) - return null; - doc.warnings.forEach(warning => log.warn(doc.options.logLevel, warning)); - if (doc.errors.length > 0) { - if (doc.options.logLevel !== 'silent') - throw doc.errors[0]; - else - doc.errors = []; - } - return doc.toJS(Object.assign({ reviver: _reviver }, options)); -} -function stringify(value, replacer, options) { - let _replacer = null; - if (typeof replacer === 'function' || Array.isArray(replacer)) { - _replacer = replacer; - } - else if (options === undefined && replacer) { - options = replacer; - } - if (typeof options === 'string') - options = options.length; - if (typeof options === 'number') { - const indent = Math.round(options); - options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent }; - } - if (value === undefined) { - const { keepUndefined } = options ?? replacer ?? 
{}; - if (!keepUndefined) - return undefined; - } - return new Document.Document(value, _replacer, options).toString(options); -} - -exports.parse = parse; -exports.parseAllDocuments = parseAllDocuments; -exports.parseDocument = parseDocument; -exports.stringify = stringify; diff --git a/bin/node_modules/yaml/dist/schema/Schema.d.ts b/bin/node_modules/yaml/dist/schema/Schema.d.ts deleted file mode 100644 index fdd9e6b..0000000 --- a/bin/node_modules/yaml/dist/schema/Schema.d.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { MAP, SCALAR, SEQ } from '../nodes/identity.js'; -import type { Pair } from '../nodes/Pair.js'; -import type { SchemaOptions, ToStringOptions } from '../options.js'; -import type { CollectionTag, ScalarTag } from './types.js'; -export declare class Schema { - compat: Array | null; - knownTags: Record; - merge: boolean; - name: string; - sortMapEntries: ((a: Pair, b: Pair) => number) | null; - tags: Array; - toStringOptions: Readonly | null; - readonly [MAP]: CollectionTag; - readonly [SCALAR]: ScalarTag; - readonly [SEQ]: CollectionTag; - constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }: SchemaOptions); - clone(): Schema; -} diff --git a/bin/node_modules/yaml/dist/schema/Schema.js b/bin/node_modules/yaml/dist/schema/Schema.js deleted file mode 100644 index 91521d0..0000000 --- a/bin/node_modules/yaml/dist/schema/Schema.js +++ /dev/null @@ -1,40 +0,0 @@ -'use strict'; - -var identity = require('../nodes/identity.js'); -var map = require('./common/map.js'); -var seq = require('./common/seq.js'); -var string = require('./common/string.js'); -var tags = require('./tags.js'); - -const sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0; -class Schema { - constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) { - this.compat = Array.isArray(compat) - ? tags.getTags(compat, 'compat') - : compat - ? tags.getTags(null, compat) - : null; - this.merge = !!merge; - this.name = (typeof schema === 'string' && schema) || 'core'; - this.knownTags = resolveKnownTags ? tags.coreKnownTags : {}; - this.tags = tags.getTags(customTags, this.name); - this.toStringOptions = toStringDefaults ?? null; - Object.defineProperty(this, identity.MAP, { value: map.map }); - Object.defineProperty(this, identity.SCALAR, { value: string.string }); - Object.defineProperty(this, identity.SEQ, { value: seq.seq }); - // Used by createMap() - this.sortMapEntries = - typeof sortMapEntries === 'function' - ? sortMapEntries - : sortMapEntries === true - ? 
sortMapEntriesByKey - : null; - } - clone() { - const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this)); - copy.tags = this.tags.slice(); - return copy; - } -} - -exports.Schema = Schema; diff --git a/bin/node_modules/yaml/dist/schema/common/map.d.ts b/bin/node_modules/yaml/dist/schema/common/map.d.ts deleted file mode 100644 index 9b300f8..0000000 --- a/bin/node_modules/yaml/dist/schema/common/map.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import type { CollectionTag } from '../types.js'; -export declare const map: CollectionTag; diff --git a/bin/node_modules/yaml/dist/schema/common/map.js b/bin/node_modules/yaml/dist/schema/common/map.js deleted file mode 100644 index 649c3b9..0000000 --- a/bin/node_modules/yaml/dist/schema/common/map.js +++ /dev/null @@ -1,19 +0,0 @@ -'use strict'; - -var identity = require('../../nodes/identity.js'); -var YAMLMap = require('../../nodes/YAMLMap.js'); - -const map = { - collection: 'map', - default: true, - nodeClass: YAMLMap.YAMLMap, - tag: 'tag:yaml.org,2002:map', - resolve(map, onError) { - if (!identity.isMap(map)) - onError('Expected a mapping for this tag'); - return map; - }, - createNode: (schema, obj, ctx) => YAMLMap.YAMLMap.from(schema, obj, ctx) -}; - -exports.map = map; diff --git a/bin/node_modules/yaml/dist/schema/common/null.d.ts b/bin/node_modules/yaml/dist/schema/common/null.d.ts deleted file mode 100644 index 66abea5..0000000 --- a/bin/node_modules/yaml/dist/schema/common/null.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { ScalarTag } from '../types.js'; -export declare const nullTag: ScalarTag & { - test: RegExp; -}; diff --git a/bin/node_modules/yaml/dist/schema/common/null.js b/bin/node_modules/yaml/dist/schema/common/null.js deleted file mode 100644 index cb353a7..0000000 --- a/bin/node_modules/yaml/dist/schema/common/null.js +++ /dev/null @@ -1,17 +0,0 @@ -'use strict'; - -var Scalar = require('../../nodes/Scalar.js'); - -const nullTag = { - identify: value => value == null, - createNode: () => new Scalar.Scalar(null), - default: true, - tag: 'tag:yaml.org,2002:null', - test: /^(?:~|[Nn]ull|NULL)?$/, - resolve: () => new Scalar.Scalar(null), - stringify: ({ source }, ctx) => typeof source === 'string' && nullTag.test.test(source) - ? 
source - : ctx.options.nullStr -}; - -exports.nullTag = nullTag; diff --git a/bin/node_modules/yaml/dist/schema/common/seq.d.ts b/bin/node_modules/yaml/dist/schema/common/seq.d.ts deleted file mode 100644 index c038d30..0000000 --- a/bin/node_modules/yaml/dist/schema/common/seq.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import type { CollectionTag } from '../types.js'; -export declare const seq: CollectionTag; diff --git a/bin/node_modules/yaml/dist/schema/common/seq.js b/bin/node_modules/yaml/dist/schema/common/seq.js deleted file mode 100644 index 9c54bc9..0000000 --- a/bin/node_modules/yaml/dist/schema/common/seq.js +++ /dev/null @@ -1,19 +0,0 @@ -'use strict'; - -var identity = require('../../nodes/identity.js'); -var YAMLSeq = require('../../nodes/YAMLSeq.js'); - -const seq = { - collection: 'seq', - default: true, - nodeClass: YAMLSeq.YAMLSeq, - tag: 'tag:yaml.org,2002:seq', - resolve(seq, onError) { - if (!identity.isSeq(seq)) - onError('Expected a sequence for this tag'); - return seq; - }, - createNode: (schema, obj, ctx) => YAMLSeq.YAMLSeq.from(schema, obj, ctx) -}; - -exports.seq = seq; diff --git a/bin/node_modules/yaml/dist/schema/common/string.d.ts b/bin/node_modules/yaml/dist/schema/common/string.d.ts deleted file mode 100644 index 539c9b1..0000000 --- a/bin/node_modules/yaml/dist/schema/common/string.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import type { ScalarTag } from '../types.js'; -export declare const string: ScalarTag; diff --git a/bin/node_modules/yaml/dist/schema/common/string.js b/bin/node_modules/yaml/dist/schema/common/string.js deleted file mode 100644 index 7601420..0000000 --- a/bin/node_modules/yaml/dist/schema/common/string.js +++ /dev/null @@ -1,16 +0,0 @@ -'use strict'; - -var stringifyString = require('../../stringify/stringifyString.js'); - -const string = { - identify: value => typeof value === 'string', - default: true, - tag: 'tag:yaml.org,2002:str', - resolve: str => str, - stringify(item, ctx, onComment, onChompKeep) { - ctx = Object.assign({ actualString: true }, ctx); - return stringifyString.stringifyString(item, ctx, onComment, onChompKeep); - } -}; - -exports.string = string; diff --git a/bin/node_modules/yaml/dist/schema/core/bool.d.ts b/bin/node_modules/yaml/dist/schema/core/bool.d.ts deleted file mode 100644 index e4bdc4c..0000000 --- a/bin/node_modules/yaml/dist/schema/core/bool.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { ScalarTag } from '../types.js'; -export declare const boolTag: ScalarTag & { - test: RegExp; -}; diff --git a/bin/node_modules/yaml/dist/schema/core/bool.js b/bin/node_modules/yaml/dist/schema/core/bool.js deleted file mode 100644 index 4def73c..0000000 --- a/bin/node_modules/yaml/dist/schema/core/bool.js +++ /dev/null @@ -1,21 +0,0 @@ -'use strict'; - -var Scalar = require('../../nodes/Scalar.js'); - -const boolTag = { - identify: value => typeof value === 'boolean', - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/, - resolve: str => new Scalar.Scalar(str[0] === 't' || str[0] === 'T'), - stringify({ source, value }, ctx) { - if (source && boolTag.test.test(source)) { - const sv = source[0] === 't' || source[0] === 'T'; - if (value === sv) - return source; - } - return value ? 
ctx.options.trueStr : ctx.options.falseStr; - } -}; - -exports.boolTag = boolTag; diff --git a/bin/node_modules/yaml/dist/schema/core/float.d.ts b/bin/node_modules/yaml/dist/schema/core/float.d.ts deleted file mode 100644 index 22f0249..0000000 --- a/bin/node_modules/yaml/dist/schema/core/float.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { ScalarTag } from '../types.js'; -export declare const floatNaN: ScalarTag; -export declare const floatExp: ScalarTag; -export declare const float: ScalarTag; diff --git a/bin/node_modules/yaml/dist/schema/core/float.js b/bin/node_modules/yaml/dist/schema/core/float.js deleted file mode 100644 index 8756446..0000000 --- a/bin/node_modules/yaml/dist/schema/core/float.js +++ /dev/null @@ -1,47 +0,0 @@ -'use strict'; - -var Scalar = require('../../nodes/Scalar.js'); -var stringifyNumber = require('../../stringify/stringifyNumber.js'); - -const floatNaN = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^(?:[-+]?\.(?:inf|Inf|INF)|\.nan|\.NaN|\.NAN)$/, - resolve: str => str.slice(-3).toLowerCase() === 'nan' - ? NaN - : str[0] === '-' - ? Number.NEGATIVE_INFINITY - : Number.POSITIVE_INFINITY, - stringify: stringifyNumber.stringifyNumber -}; -const floatExp = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - format: 'EXP', - test: /^[-+]?(?:\.[0-9]+|[0-9]+(?:\.[0-9]*)?)[eE][-+]?[0-9]+$/, - resolve: str => parseFloat(str), - stringify(node) { - const num = Number(node.value); - return isFinite(num) ? num.toExponential() : stringifyNumber.stringifyNumber(node); - } -}; -const float = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^[-+]?(?:\.[0-9]+|[0-9]+\.[0-9]*)$/, - resolve(str) { - const node = new Scalar.Scalar(parseFloat(str)); - const dot = str.indexOf('.'); - if (dot !== -1 && str[str.length - 1] === '0') - node.minFractionDigits = str.length - dot - 1; - return node; - }, - stringify: stringifyNumber.stringifyNumber -}; - -exports.float = float; -exports.floatExp = floatExp; -exports.floatNaN = floatNaN; diff --git a/bin/node_modules/yaml/dist/schema/core/int.d.ts b/bin/node_modules/yaml/dist/schema/core/int.d.ts deleted file mode 100644 index 35e2d4b..0000000 --- a/bin/node_modules/yaml/dist/schema/core/int.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { ScalarTag } from '../types.js'; -export declare const intOct: ScalarTag; -export declare const int: ScalarTag; -export declare const intHex: ScalarTag; diff --git a/bin/node_modules/yaml/dist/schema/core/int.js b/bin/node_modules/yaml/dist/schema/core/int.js deleted file mode 100644 index fe4c9ca..0000000 --- a/bin/node_modules/yaml/dist/schema/core/int.js +++ /dev/null @@ -1,42 +0,0 @@ -'use strict'; - -var stringifyNumber = require('../../stringify/stringifyNumber.js'); - -const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value); -const intResolve = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? 
BigInt(str) : parseInt(str.substring(offset), radix)); -function intStringify(node, radix, prefix) { - const { value } = node; - if (intIdentify(value) && value >= 0) - return prefix + value.toString(radix); - return stringifyNumber.stringifyNumber(node); -} -const intOct = { - identify: value => intIdentify(value) && value >= 0, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'OCT', - test: /^0o[0-7]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 8, opt), - stringify: node => intStringify(node, 8, '0o') -}; -const int = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - test: /^[-+]?[0-9]+$/, - resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt), - stringify: stringifyNumber.stringifyNumber -}; -const intHex = { - identify: value => intIdentify(value) && value >= 0, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'HEX', - test: /^0x[0-9a-fA-F]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt), - stringify: node => intStringify(node, 16, '0x') -}; - -exports.int = int; -exports.intHex = intHex; -exports.intOct = intOct; diff --git a/bin/node_modules/yaml/dist/schema/core/schema.d.ts b/bin/node_modules/yaml/dist/schema/core/schema.d.ts deleted file mode 100644 index f5bdd21..0000000 --- a/bin/node_modules/yaml/dist/schema/core/schema.d.ts +++ /dev/null @@ -1 +0,0 @@ -export declare const schema: (import("../types.js").CollectionTag | import("../types.js").ScalarTag)[]; diff --git a/bin/node_modules/yaml/dist/schema/core/schema.js b/bin/node_modules/yaml/dist/schema/core/schema.js deleted file mode 100644 index 6ab87f2..0000000 --- a/bin/node_modules/yaml/dist/schema/core/schema.js +++ /dev/null @@ -1,25 +0,0 @@ -'use strict'; - -var map = require('../common/map.js'); -var _null = require('../common/null.js'); -var seq = require('../common/seq.js'); -var string = require('../common/string.js'); -var bool = require('./bool.js'); -var float = require('./float.js'); -var int = require('./int.js'); - -const schema = [ - map.map, - seq.seq, - string.string, - _null.nullTag, - bool.boolTag, - int.intOct, - int.int, - int.intHex, - float.floatNaN, - float.floatExp, - float.float -]; - -exports.schema = schema; diff --git a/bin/node_modules/yaml/dist/schema/json-schema.d.ts b/bin/node_modules/yaml/dist/schema/json-schema.d.ts deleted file mode 100644 index 6d51f40..0000000 --- a/bin/node_modules/yaml/dist/schema/json-schema.d.ts +++ /dev/null @@ -1,69 +0,0 @@ -type JsonSchema = boolean | ArraySchema | ObjectSchema | NumberSchema | StringSchema; -type JsonType = 'array' | 'object' | 'string' | 'number' | 'integer' | 'boolean' | 'null'; -interface CommonSchema { - type?: JsonType | JsonType[]; - const?: unknown; - enum?: unknown[]; - format?: string; - allOf?: JsonSchema[]; - anyOf?: JsonSchema[]; - oneOf?: JsonSchema[]; - not?: JsonSchema; - if?: JsonSchema; - then?: JsonSchema; - else?: JsonSchema; - $id?: string; - $defs?: Record; - $anchor?: string; - $dynamicAnchor?: string; - $ref?: string; - $dynamicRef?: string; - $schema?: string; - $vocabulary?: Record; - $comment?: string; - default?: unknown; - deprecated?: boolean; - readOnly?: boolean; - writeOnly?: boolean; - title?: string; - description?: string; - examples?: unknown[]; -} -interface ArraySchema extends CommonSchema { - prefixItems?: JsonSchema[]; - items?: JsonSchema; - contains?: JsonSchema; - unevaluatedItems?: JsonSchema; - maxItems?: number; - minItems?: number; - uniqueItems?: boolean; - maxContains?: number; - minContains?: number; -} 
-interface ObjectSchema extends CommonSchema { - properties?: Record; - patternProperties?: Record; - additionalProperties?: JsonSchema; - propertyNames?: JsonSchema; - unevaluatedProperties?: JsonSchema; - maxProperties?: number; - minProperties?: number; - required?: string[]; - dependentRequired?: Record; - dependentSchemas?: Record; -} -interface StringSchema extends CommonSchema { - maxLength?: number; - minLength?: number; - patter?: string; - contentEncoding?: string; - contentMediaType?: string; - contentSchema?: JsonSchema; -} -interface NumberSchema extends CommonSchema { - multipleOf?: number; - maximum?: number; - exclusiveMaximum?: number; - minimum?: number; - exclusiveMinimum?: number; -} diff --git a/bin/node_modules/yaml/dist/schema/json/schema.d.ts b/bin/node_modules/yaml/dist/schema/json/schema.d.ts deleted file mode 100644 index 76a4301..0000000 --- a/bin/node_modules/yaml/dist/schema/json/schema.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import { CollectionTag, ScalarTag } from '../types.js'; -export declare const schema: (CollectionTag | ScalarTag)[]; diff --git a/bin/node_modules/yaml/dist/schema/json/schema.js b/bin/node_modules/yaml/dist/schema/json/schema.js deleted file mode 100644 index 31d0b4d..0000000 --- a/bin/node_modules/yaml/dist/schema/json/schema.js +++ /dev/null @@ -1,64 +0,0 @@ -'use strict'; - -var Scalar = require('../../nodes/Scalar.js'); -var map = require('../common/map.js'); -var seq = require('../common/seq.js'); - -function intIdentify(value) { - return typeof value === 'bigint' || Number.isInteger(value); -} -const stringifyJSON = ({ value }) => JSON.stringify(value); -const jsonScalars = [ - { - identify: value => typeof value === 'string', - default: true, - tag: 'tag:yaml.org,2002:str', - resolve: str => str, - stringify: stringifyJSON - }, - { - identify: value => value == null, - createNode: () => new Scalar.Scalar(null), - default: true, - tag: 'tag:yaml.org,2002:null', - test: /^null$/, - resolve: () => null, - stringify: stringifyJSON - }, - { - identify: value => typeof value === 'boolean', - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^true|false$/, - resolve: str => str === 'true', - stringify: stringifyJSON - }, - { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - test: /^-?(?:0|[1-9][0-9]*)$/, - resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10), - stringify: ({ value }) => intIdentify(value) ? 
value.toString() : JSON.stringify(value) - }, - { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^-?(?:0|[1-9][0-9]*)(?:\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/, - resolve: str => parseFloat(str), - stringify: stringifyJSON - } -]; -const jsonError = { - default: true, - tag: '', - test: /^/, - resolve(str, onError) { - onError(`Unresolved plain scalar ${JSON.stringify(str)}`); - return str; - } -}; -const schema = [map.map, seq.seq].concat(jsonScalars, jsonError); - -exports.schema = schema; diff --git a/bin/node_modules/yaml/dist/schema/tags.d.ts b/bin/node_modules/yaml/dist/schema/tags.d.ts deleted file mode 100644 index 133333f..0000000 --- a/bin/node_modules/yaml/dist/schema/tags.d.ts +++ /dev/null @@ -1,40 +0,0 @@ -import { SchemaOptions } from '../options.js'; -import type { CollectionTag, ScalarTag } from './types.js'; -declare const tagsByName: { - binary: ScalarTag; - bool: ScalarTag & { - test: RegExp; - }; - float: ScalarTag; - floatExp: ScalarTag; - floatNaN: ScalarTag; - floatTime: ScalarTag; - int: ScalarTag; - intHex: ScalarTag; - intOct: ScalarTag; - intTime: ScalarTag; - map: CollectionTag; - null: ScalarTag & { - test: RegExp; - }; - omap: CollectionTag; - pairs: CollectionTag; - seq: CollectionTag; - set: CollectionTag; - timestamp: ScalarTag & { - test: RegExp; - }; -}; -export type TagId = keyof typeof tagsByName; -export type Tags = Array; -export declare const coreKnownTags: { - 'tag:yaml.org,2002:binary': ScalarTag; - 'tag:yaml.org,2002:omap': CollectionTag; - 'tag:yaml.org,2002:pairs': CollectionTag; - 'tag:yaml.org,2002:set': CollectionTag; - 'tag:yaml.org,2002:timestamp': ScalarTag & { - test: RegExp; - }; -}; -export declare function getTags(customTags: SchemaOptions['customTags'] | undefined, schemaName: string): (CollectionTag | ScalarTag)[]; -export {}; diff --git a/bin/node_modules/yaml/dist/schema/tags.js b/bin/node_modules/yaml/dist/schema/tags.js deleted file mode 100644 index e49d2b0..0000000 --- a/bin/node_modules/yaml/dist/schema/tags.js +++ /dev/null @@ -1,86 +0,0 @@ -'use strict'; - -var map = require('./common/map.js'); -var _null = require('./common/null.js'); -var seq = require('./common/seq.js'); -var string = require('./common/string.js'); -var bool = require('./core/bool.js'); -var float = require('./core/float.js'); -var int = require('./core/int.js'); -var schema = require('./core/schema.js'); -var schema$1 = require('./json/schema.js'); -var binary = require('./yaml-1.1/binary.js'); -var omap = require('./yaml-1.1/omap.js'); -var pairs = require('./yaml-1.1/pairs.js'); -var schema$2 = require('./yaml-1.1/schema.js'); -var set = require('./yaml-1.1/set.js'); -var timestamp = require('./yaml-1.1/timestamp.js'); - -const schemas = new Map([ - ['core', schema.schema], - ['failsafe', [map.map, seq.seq, string.string]], - ['json', schema$1.schema], - ['yaml11', schema$2.schema], - ['yaml-1.1', schema$2.schema] -]); -const tagsByName = { - binary: binary.binary, - bool: bool.boolTag, - float: float.float, - floatExp: float.floatExp, - floatNaN: float.floatNaN, - floatTime: timestamp.floatTime, - int: int.int, - intHex: int.intHex, - intOct: int.intOct, - intTime: timestamp.intTime, - map: map.map, - null: _null.nullTag, - omap: omap.omap, - pairs: pairs.pairs, - seq: seq.seq, - set: set.set, - timestamp: timestamp.timestamp -}; -const coreKnownTags = { - 'tag:yaml.org,2002:binary': binary.binary, - 'tag:yaml.org,2002:omap': omap.omap, - 'tag:yaml.org,2002:pairs': pairs.pairs, - 
'tag:yaml.org,2002:set': set.set, - 'tag:yaml.org,2002:timestamp': timestamp.timestamp -}; -function getTags(customTags, schemaName) { - let tags = schemas.get(schemaName); - if (!tags) { - if (Array.isArray(customTags)) - tags = []; - else { - const keys = Array.from(schemas.keys()) - .filter(key => key !== 'yaml11') - .map(key => JSON.stringify(key)) - .join(', '); - throw new Error(`Unknown schema "${schemaName}"; use one of ${keys} or define customTags array`); - } - } - if (Array.isArray(customTags)) { - for (const tag of customTags) - tags = tags.concat(tag); - } - else if (typeof customTags === 'function') { - tags = customTags(tags.slice()); - } - return tags.map(tag => { - if (typeof tag !== 'string') - return tag; - const tagObj = tagsByName[tag]; - if (tagObj) - return tagObj; - const keys = Object.keys(tagsByName) - .map(key => JSON.stringify(key)) - .join(', '); - throw new Error(`Unknown custom tag "${tag}"; use one of ${keys}`); - }); -} - -exports.coreKnownTags = coreKnownTags; -exports.getTags = getTags; diff --git a/bin/node_modules/yaml/dist/schema/types.d.ts b/bin/node_modules/yaml/dist/schema/types.d.ts deleted file mode 100644 index e814f1e..0000000 --- a/bin/node_modules/yaml/dist/schema/types.d.ts +++ /dev/null @@ -1,90 +0,0 @@ -import type { CreateNodeContext } from '../doc/createNode.js'; -import type { Node } from '../nodes/Node.js'; -import type { Scalar } from '../nodes/Scalar.js'; -import type { YAMLMap } from '../nodes/YAMLMap.js'; -import type { YAMLSeq } from '../nodes/YAMLSeq.js'; -import type { ParseOptions } from '../options.js'; -import type { StringifyContext } from '../stringify/stringify.js'; -import type { Schema } from './Schema.js'; -interface TagBase { - /** - * An optional factory function, used e.g. by collections when wrapping JS objects as AST nodes. - */ - createNode?: (schema: Schema, value: unknown, ctx: CreateNodeContext) => Node; - /** - * If `true`, together with `test` allows for values to be stringified without - * an explicit tag. For most cases, it's unlikely that you'll actually want to - * use this, even if you first think you do. - */ - default?: boolean; - /** - * If a tag has multiple forms that should be parsed and/or stringified - * differently, use `format` to identify them. - */ - format?: string; - /** - * Used by `YAML.createNode` to detect your data type, e.g. using `typeof` or - * `instanceof`. - */ - identify?: (value: unknown) => boolean; - /** - * The identifier for your data type, with which its stringified form will be - * prefixed. Should either be a !-prefixed local `!tag`, or a fully qualified - * `tag:domain,date:foo`. - */ - tag: string; -} -export interface ScalarTag extends TagBase { - collection?: never; - nodeClass?: never; - /** - * Turns a value into an AST node. - * If returning a non-`Node` value, the output will be wrapped as a `Scalar`. - */ - resolve(value: string, onError: (message: string) => void, options: ParseOptions): unknown; - /** - * Optional function stringifying a Scalar node. If your data includes a - * suitable `.toString()` method, you can probably leave this undefined and - * use the default stringifier. - * - * @param item The node being stringified. - * @param ctx Contains the stringifying context variables. - * @param onComment Callback to signal that the stringifier includes the - * item's comment in its output. - * @param onChompKeep Callback to signal that the output uses a block scalar - * type with the `+` chomping indicator. 
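// Hedged sketch of a custom ScalarTag using the fields documented above
// (tag/identify/resolve/stringify); the `!duration` local tag and its "90s"
// format are made up for illustration, not part of the library.
const YAML = require('yaml')

const duration = {
  tag: '!duration',
  identify: value => typeof value === 'number' && value >= 0,
  resolve: str => Number(str.replace(/s$/, '')), // "90s" -> 90
  stringify: ({ value }) => `${value}s`
}

const parsed = YAML.parse('timeout: !duration 90s', { customTags: [duration] })
// parsed.timeout === 90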
- */ - stringify?: (item: Scalar, ctx: StringifyContext, onComment?: () => void, onChompKeep?: () => void) => string; - /** - * Together with `default` allows for values to be stringified without an - * explicit tag and detected using a regular expression. For most cases, it's - * unlikely that you'll actually want to use these, even if you first think - * you do. - */ - test?: RegExp; -} -export interface CollectionTag extends TagBase { - stringify?: never; - test?: never; - /** The source collection type supported by this tag. */ - collection: 'map' | 'seq'; - /** - * The `Node` child class that implements this tag. - * If set, used to select this tag when stringifying. - * - * If the class provides a static `from` method, then that - * will be used if the tag object doesn't have a `createNode` method. - */ - nodeClass?: { - new (schema?: Schema): Node; - from?: (schema: Schema, obj: unknown, ctx: CreateNodeContext) => Node; - }; - /** - * Turns a value into an AST node. - * If returning a non-`Node` value, the output will be wrapped as a `Scalar`. - * - * Note: this is required if nodeClass is not provided. - */ - resolve?: (value: YAMLMap.Parsed | YAMLSeq.Parsed, onError: (message: string) => void, options: ParseOptions) => unknown; -} -export {}; diff --git a/bin/node_modules/yaml/dist/schema/yaml-1.1/binary.d.ts b/bin/node_modules/yaml/dist/schema/yaml-1.1/binary.d.ts deleted file mode 100644 index 2054970..0000000 --- a/bin/node_modules/yaml/dist/schema/yaml-1.1/binary.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import type { ScalarTag } from '../types.js'; -export declare const binary: ScalarTag; diff --git a/bin/node_modules/yaml/dist/schema/yaml-1.1/binary.js b/bin/node_modules/yaml/dist/schema/yaml-1.1/binary.js deleted file mode 100644 index 38fa498..0000000 --- a/bin/node_modules/yaml/dist/schema/yaml-1.1/binary.js +++ /dev/null @@ -1,68 +0,0 @@ -'use strict'; - -var Scalar = require('../../nodes/Scalar.js'); -var stringifyString = require('../../stringify/stringifyString.js'); - -const binary = { - identify: value => value instanceof Uint8Array, // Buffer inherits from Uint8Array - default: false, - tag: 'tag:yaml.org,2002:binary', - /** - * Returns a Buffer in node and an Uint8Array in browsers - * - * To use the resulting buffer as an image, you'll want to do something like: - * - * const blob = new Blob([buffer], { type: 'image/jpeg' }) - * document.querySelector('#photo').src = URL.createObjectURL(blob) - */ - resolve(src, onError) { - if (typeof Buffer === 'function') { - return Buffer.from(src, 'base64'); - } - else if (typeof atob === 'function') { - // On IE 11, atob() can't handle newlines - const str = atob(src.replace(/[\n\r]/g, '')); - const buffer = new Uint8Array(str.length); - for (let i = 0; i < str.length; ++i) - buffer[i] = str.charCodeAt(i); - return buffer; - } - else { - onError('This environment does not support reading binary tags; either Buffer or atob is required'); - return src; - } - }, - stringify({ comment, type, value }, ctx, onComment, onChompKeep) { - const buf = value; // checked earlier by binary.identify() - let str; - if (typeof Buffer === 'function') { - str = - buf instanceof Buffer - ? 
buf.toString('base64') - : Buffer.from(buf.buffer).toString('base64'); - } - else if (typeof btoa === 'function') { - let s = ''; - for (let i = 0; i < buf.length; ++i) - s += String.fromCharCode(buf[i]); - str = btoa(s); - } - else { - throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required'); - } - if (!type) - type = Scalar.Scalar.BLOCK_LITERAL; - if (type !== Scalar.Scalar.QUOTE_DOUBLE) { - const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth); - const n = Math.ceil(str.length / lineWidth); - const lines = new Array(n); - for (let i = 0, o = 0; i < n; ++i, o += lineWidth) { - lines[i] = str.substr(o, lineWidth); - } - str = lines.join(type === Scalar.Scalar.BLOCK_LITERAL ? '\n' : ' '); - } - return stringifyString.stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep); - } -}; - -exports.binary = binary; diff --git a/bin/node_modules/yaml/dist/schema/yaml-1.1/bool.d.ts b/bin/node_modules/yaml/dist/schema/yaml-1.1/bool.d.ts deleted file mode 100644 index 587b55b..0000000 --- a/bin/node_modules/yaml/dist/schema/yaml-1.1/bool.d.ts +++ /dev/null @@ -1,7 +0,0 @@ -import type { ScalarTag } from '../types.js'; -export declare const trueTag: ScalarTag & { - test: RegExp; -}; -export declare const falseTag: ScalarTag & { - test: RegExp; -}; diff --git a/bin/node_modules/yaml/dist/schema/yaml-1.1/bool.js b/bin/node_modules/yaml/dist/schema/yaml-1.1/bool.js deleted file mode 100644 index d987952..0000000 --- a/bin/node_modules/yaml/dist/schema/yaml-1.1/bool.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict'; - -var Scalar = require('../../nodes/Scalar.js'); - -function boolStringify({ value, source }, ctx) { - const boolObj = value ? trueTag : falseTag; - if (source && boolObj.test.test(source)) - return source; - return value ? ctx.options.trueStr : ctx.options.falseStr; -} -const trueTag = { - identify: value => value === true, - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/, - resolve: () => new Scalar.Scalar(true), - stringify: boolStringify -}; -const falseTag = { - identify: value => value === false, - default: true, - tag: 'tag:yaml.org,2002:bool', - test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/, - resolve: () => new Scalar.Scalar(false), - stringify: boolStringify -}; - -exports.falseTag = falseTag; -exports.trueTag = trueTag; diff --git a/bin/node_modules/yaml/dist/schema/yaml-1.1/float.d.ts b/bin/node_modules/yaml/dist/schema/yaml-1.1/float.d.ts deleted file mode 100644 index 22f0249..0000000 --- a/bin/node_modules/yaml/dist/schema/yaml-1.1/float.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { ScalarTag } from '../types.js'; -export declare const floatNaN: ScalarTag; -export declare const floatExp: ScalarTag; -export declare const float: ScalarTag; diff --git a/bin/node_modules/yaml/dist/schema/yaml-1.1/float.js b/bin/node_modules/yaml/dist/schema/yaml-1.1/float.js deleted file mode 100644 index 39f1eb0..0000000 --- a/bin/node_modules/yaml/dist/schema/yaml-1.1/float.js +++ /dev/null @@ -1,50 +0,0 @@ -'use strict'; - -var Scalar = require('../../nodes/Scalar.js'); -var stringifyNumber = require('../../stringify/stringifyNumber.js'); - -const floatNaN = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^(?:[-+]?\.(?:inf|Inf|INF)|\.nan|\.NaN|\.NAN)$/, - resolve: (str) => str.slice(-3).toLowerCase() === 'nan' - ? NaN - : str[0] === '-' - ? 
Number.NEGATIVE_INFINITY - : Number.POSITIVE_INFINITY, - stringify: stringifyNumber.stringifyNumber -}; -const floatExp = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - format: 'EXP', - test: /^[-+]?(?:[0-9][0-9_]*)?(?:\.[0-9_]*)?[eE][-+]?[0-9]+$/, - resolve: (str) => parseFloat(str.replace(/_/g, '')), - stringify(node) { - const num = Number(node.value); - return isFinite(num) ? num.toExponential() : stringifyNumber.stringifyNumber(node); - } -}; -const float = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - test: /^[-+]?(?:[0-9][0-9_]*)?\.[0-9_]*$/, - resolve(str) { - const node = new Scalar.Scalar(parseFloat(str.replace(/_/g, ''))); - const dot = str.indexOf('.'); - if (dot !== -1) { - const f = str.substring(dot + 1).replace(/_/g, ''); - if (f[f.length - 1] === '0') - node.minFractionDigits = f.length; - } - return node; - }, - stringify: stringifyNumber.stringifyNumber -}; - -exports.float = float; -exports.floatExp = floatExp; -exports.floatNaN = floatNaN; diff --git a/bin/node_modules/yaml/dist/schema/yaml-1.1/int.d.ts b/bin/node_modules/yaml/dist/schema/yaml-1.1/int.d.ts deleted file mode 100644 index 3d92f37..0000000 --- a/bin/node_modules/yaml/dist/schema/yaml-1.1/int.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -import type { ScalarTag } from '../types.js'; -export declare const intBin: ScalarTag; -export declare const intOct: ScalarTag; -export declare const int: ScalarTag; -export declare const intHex: ScalarTag; diff --git a/bin/node_modules/yaml/dist/schema/yaml-1.1/int.js b/bin/node_modules/yaml/dist/schema/yaml-1.1/int.js deleted file mode 100644 index fdf47ca..0000000 --- a/bin/node_modules/yaml/dist/schema/yaml-1.1/int.js +++ /dev/null @@ -1,76 +0,0 @@ -'use strict'; - -var stringifyNumber = require('../../stringify/stringifyNumber.js'); - -const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value); -function intResolve(str, offset, radix, { intAsBigInt }) { - const sign = str[0]; - if (sign === '-' || sign === '+') - offset += 1; - str = str.substring(offset).replace(/_/g, ''); - if (intAsBigInt) { - switch (radix) { - case 2: - str = `0b${str}`; - break; - case 8: - str = `0o${str}`; - break; - case 16: - str = `0x${str}`; - break; - } - const n = BigInt(str); - return sign === '-' ? BigInt(-1) * n : n; - } - const n = parseInt(str, radix); - return sign === '-' ? -1 * n : n; -} -function intStringify(node, radix, prefix) { - const { value } = node; - if (intIdentify(value)) { - const str = value.toString(radix); - return value < 0 ? 
'-' + prefix + str.substr(1) : prefix + str; - } - return stringifyNumber.stringifyNumber(node); -} -const intBin = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'BIN', - test: /^[-+]?0b[0-1_]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt), - stringify: node => intStringify(node, 2, '0b') -}; -const intOct = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'OCT', - test: /^[-+]?0[0-7_]+$/, - resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt), - stringify: node => intStringify(node, 8, '0') -}; -const int = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - test: /^[-+]?[0-9][0-9_]*$/, - resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt), - stringify: stringifyNumber.stringifyNumber -}; -const intHex = { - identify: intIdentify, - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'HEX', - test: /^[-+]?0x[0-9a-fA-F_]+$/, - resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt), - stringify: node => intStringify(node, 16, '0x') -}; - -exports.int = int; -exports.intBin = intBin; -exports.intHex = intHex; -exports.intOct = intOct; diff --git a/bin/node_modules/yaml/dist/schema/yaml-1.1/omap.d.ts b/bin/node_modules/yaml/dist/schema/yaml-1.1/omap.d.ts deleted file mode 100644 index 12727dc..0000000 --- a/bin/node_modules/yaml/dist/schema/yaml-1.1/omap.d.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { ToJSContext } from '../../nodes/toJS.js'; -import { YAMLSeq } from '../../nodes/YAMLSeq.js'; -import { CreateNodeContext } from '../../util.js'; -import type { Schema } from '../Schema.js'; -import { CollectionTag } from '../types.js'; -export declare class YAMLOMap extends YAMLSeq { - static tag: string; - constructor(); - add: (pair: import("../../index.js").Pair | { - key: any; - value: any; - }, overwrite?: boolean | undefined) => void; - delete: (key: unknown) => boolean; - get: { - (key: unknown, keepScalar: true): import("../../index.js").Scalar | undefined; - (key: unknown, keepScalar?: false | undefined): any; - (key: unknown, keepScalar?: boolean | undefined): any; - }; - has: (key: unknown) => boolean; - set: (key: any, value: any) => void; - /** - * If `ctx` is given, the return type is actually `Map`, - * but TypeScript won't allow widening the signature of a child method. 
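// Illustrative sketch, assuming the published `yaml` package: under the
// YAML 1.1 schema a !!omap node resolves to a JavaScript Map, per the
// toJSON note above.
const YAML = require('yaml')

const m = YAML.parse('!!omap [ a: 1, b: 2 ]', { schema: 'yaml-1.1' })
// m instanceof Map === true; m.get('a') === 1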
- */ - toJSON(_?: unknown, ctx?: ToJSContext): unknown[]; - static from(schema: Schema, iterable: unknown, ctx: CreateNodeContext): YAMLOMap; -} -export declare const omap: CollectionTag; diff --git a/bin/node_modules/yaml/dist/schema/yaml-1.1/omap.js b/bin/node_modules/yaml/dist/schema/yaml-1.1/omap.js deleted file mode 100644 index 3ca141d..0000000 --- a/bin/node_modules/yaml/dist/schema/yaml-1.1/omap.js +++ /dev/null @@ -1,77 +0,0 @@ -'use strict'; - -var identity = require('../../nodes/identity.js'); -var toJS = require('../../nodes/toJS.js'); -var YAMLMap = require('../../nodes/YAMLMap.js'); -var YAMLSeq = require('../../nodes/YAMLSeq.js'); -var pairs = require('./pairs.js'); - -class YAMLOMap extends YAMLSeq.YAMLSeq { - constructor() { - super(); - this.add = YAMLMap.YAMLMap.prototype.add.bind(this); - this.delete = YAMLMap.YAMLMap.prototype.delete.bind(this); - this.get = YAMLMap.YAMLMap.prototype.get.bind(this); - this.has = YAMLMap.YAMLMap.prototype.has.bind(this); - this.set = YAMLMap.YAMLMap.prototype.set.bind(this); - this.tag = YAMLOMap.tag; - } - /** - * If `ctx` is given, the return type is actually `Map`, - * but TypeScript won't allow widening the signature of a child method. - */ - toJSON(_, ctx) { - if (!ctx) - return super.toJSON(_); - const map = new Map(); - if (ctx?.onCreate) - ctx.onCreate(map); - for (const pair of this.items) { - let key, value; - if (identity.isPair(pair)) { - key = toJS.toJS(pair.key, '', ctx); - value = toJS.toJS(pair.value, key, ctx); - } - else { - key = toJS.toJS(pair, '', ctx); - } - if (map.has(key)) - throw new Error('Ordered maps must not include duplicate keys'); - map.set(key, value); - } - return map; - } - static from(schema, iterable, ctx) { - const pairs$1 = pairs.createPairs(schema, iterable, ctx); - const omap = new this(); - omap.items = pairs$1.items; - return omap; - } -} -YAMLOMap.tag = 'tag:yaml.org,2002:omap'; -const omap = { - collection: 'seq', - identify: value => value instanceof Map, - nodeClass: YAMLOMap, - default: false, - tag: 'tag:yaml.org,2002:omap', - resolve(seq, onError) { - const pairs$1 = pairs.resolvePairs(seq, onError); - const seenKeys = []; - for (const { key } of pairs$1.items) { - if (identity.isScalar(key)) { - if (seenKeys.includes(key.value)) { - onError(`Ordered maps must not include duplicate keys: ${key.value}`); - } - else { - seenKeys.push(key.value); - } - } - } - return Object.assign(new YAMLOMap(), pairs$1); - }, - createNode: (schema, iterable, ctx) => YAMLOMap.from(schema, iterable, ctx) -}; - -exports.YAMLOMap = YAMLOMap; -exports.omap = omap; diff --git a/bin/node_modules/yaml/dist/schema/yaml-1.1/pairs.d.ts b/bin/node_modules/yaml/dist/schema/yaml-1.1/pairs.d.ts deleted file mode 100644 index 20bb907..0000000 --- a/bin/node_modules/yaml/dist/schema/yaml-1.1/pairs.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -import type { CreateNodeContext } from '../../doc/createNode.js'; -import type { ParsedNode } from '../../nodes/Node.js'; -import { Pair } from '../../nodes/Pair.js'; -import { YAMLMap } from '../../nodes/YAMLMap.js'; -import { YAMLSeq } from '../../nodes/YAMLSeq.js'; -import type { Schema } from '../../schema/Schema.js'; -import type { CollectionTag } from '../types.js'; -export declare function resolvePairs(seq: YAMLSeq.Parsed> | YAMLMap.Parsed, onError: (message: string) => void): YAMLSeq.Parsed>; -export declare function createPairs(schema: Schema, iterable: unknown, ctx: CreateNodeContext): YAMLSeq; -export declare const pairs: CollectionTag; diff --git 
a/bin/node_modules/yaml/dist/schema/yaml-1.1/pairs.js b/bin/node_modules/yaml/dist/schema/yaml-1.1/pairs.js deleted file mode 100644 index aa32e0f..0000000 --- a/bin/node_modules/yaml/dist/schema/yaml-1.1/pairs.js +++ /dev/null @@ -1,82 +0,0 @@ -'use strict'; - -var identity = require('../../nodes/identity.js'); -var Pair = require('../../nodes/Pair.js'); -var Scalar = require('../../nodes/Scalar.js'); -var YAMLSeq = require('../../nodes/YAMLSeq.js'); - -function resolvePairs(seq, onError) { - if (identity.isSeq(seq)) { - for (let i = 0; i < seq.items.length; ++i) { - let item = seq.items[i]; - if (identity.isPair(item)) - continue; - else if (identity.isMap(item)) { - if (item.items.length > 1) - onError('Each pair must have its own sequence indicator'); - const pair = item.items[0] || new Pair.Pair(new Scalar.Scalar(null)); - if (item.commentBefore) - pair.key.commentBefore = pair.key.commentBefore - ? `${item.commentBefore}\n${pair.key.commentBefore}` - : item.commentBefore; - if (item.comment) { - const cn = pair.value ?? pair.key; - cn.comment = cn.comment - ? `${item.comment}\n${cn.comment}` - : item.comment; - } - item = pair; - } - seq.items[i] = identity.isPair(item) ? item : new Pair.Pair(item); - } - } - else - onError('Expected a sequence for this tag'); - return seq; -} -function createPairs(schema, iterable, ctx) { - const { replacer } = ctx; - const pairs = new YAMLSeq.YAMLSeq(schema); - pairs.tag = 'tag:yaml.org,2002:pairs'; - let i = 0; - if (iterable && Symbol.iterator in Object(iterable)) - for (let it of iterable) { - if (typeof replacer === 'function') - it = replacer.call(iterable, String(i++), it); - let key, value; - if (Array.isArray(it)) { - if (it.length === 2) { - key = it[0]; - value = it[1]; - } - else - throw new TypeError(`Expected [key, value] tuple: ${it}`); - } - else if (it && it instanceof Object) { - const keys = Object.keys(it); - if (keys.length === 1) { - key = keys[0]; - value = it[key]; - } - else { - throw new TypeError(`Expected tuple with one key, not ${keys.length} keys`); - } - } - else { - key = it; - } - pairs.items.push(Pair.createPair(key, value, ctx)); - } - return pairs; -} -const pairs = { - collection: 'seq', - default: false, - tag: 'tag:yaml.org,2002:pairs', - resolve: resolvePairs, - createNode: createPairs -}; - -exports.createPairs = createPairs; -exports.pairs = pairs; -exports.resolvePairs = resolvePairs; diff --git a/bin/node_modules/yaml/dist/schema/yaml-1.1/schema.d.ts b/bin/node_modules/yaml/dist/schema/yaml-1.1/schema.d.ts deleted file mode 100644 index f5bdd21..0000000 --- a/bin/node_modules/yaml/dist/schema/yaml-1.1/schema.d.ts +++ /dev/null @@ -1 +0,0 @@ -export declare const schema: (import("../types.js").CollectionTag | import("../types.js").ScalarTag)[]; diff --git a/bin/node_modules/yaml/dist/schema/yaml-1.1/schema.js b/bin/node_modules/yaml/dist/schema/yaml-1.1/schema.js deleted file mode 100644 index c04270a..0000000 --- a/bin/node_modules/yaml/dist/schema/yaml-1.1/schema.js +++ /dev/null @@ -1,39 +0,0 @@ -'use strict'; - -var map = require('../common/map.js'); -var _null = require('../common/null.js'); -var seq = require('../common/seq.js'); -var string = require('../common/string.js'); -var binary = require('./binary.js'); -var bool = require('./bool.js'); -var float = require('./float.js'); -var int = require('./int.js'); -var omap = require('./omap.js'); -var pairs = require('./pairs.js'); -var set = require('./set.js'); -var timestamp = require('./timestamp.js'); - -const schema = [ - map.map, - seq.seq, - 
string.string, - _null.nullTag, - bool.trueTag, - bool.falseTag, - int.intBin, - int.intOct, - int.int, - int.intHex, - float.floatNaN, - float.floatExp, - float.float, - binary.binary, - omap.omap, - pairs.pairs, - set.set, - timestamp.intTime, - timestamp.floatTime, - timestamp.timestamp -]; - -exports.schema = schema; diff --git a/bin/node_modules/yaml/dist/schema/yaml-1.1/set.d.ts b/bin/node_modules/yaml/dist/schema/yaml-1.1/set.d.ts deleted file mode 100644 index 2054fb7..0000000 --- a/bin/node_modules/yaml/dist/schema/yaml-1.1/set.d.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { Pair } from '../../nodes/Pair.js'; -import { Scalar } from '../../nodes/Scalar.js'; -import { ToJSContext } from '../../nodes/toJS.js'; -import { YAMLMap } from '../../nodes/YAMLMap.js'; -import type { Schema } from '../../schema/Schema.js'; -import type { StringifyContext } from '../../stringify/stringify.js'; -import { CreateNodeContext } from '../../util.js'; -import type { CollectionTag } from '../types.js'; -export declare class YAMLSet extends YAMLMap | null> { - static tag: string; - constructor(schema?: Schema); - add(key: T | Pair | null> | { - key: T; - value: Scalar | null; - }): void; - /** - * If `keepPair` is `true`, returns the Pair matching `key`. - * Otherwise, returns the value of that Pair's key. - */ - get(key: unknown, keepPair?: boolean): any; - set(key: T, value: boolean): void; - /** @deprecated Will throw; `value` must be boolean */ - set(key: T, value: null): void; - toJSON(_?: unknown, ctx?: ToJSContext): any; - toString(ctx?: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string; - static from(schema: Schema, iterable: unknown, ctx: CreateNodeContext): YAMLSet; -} -export declare const set: CollectionTag; diff --git a/bin/node_modules/yaml/dist/schema/yaml-1.1/set.js b/bin/node_modules/yaml/dist/schema/yaml-1.1/set.js deleted file mode 100644 index 650c250..0000000 --- a/bin/node_modules/yaml/dist/schema/yaml-1.1/set.js +++ /dev/null @@ -1,96 +0,0 @@ -'use strict'; - -var identity = require('../../nodes/identity.js'); -var Pair = require('../../nodes/Pair.js'); -var YAMLMap = require('../../nodes/YAMLMap.js'); - -class YAMLSet extends YAMLMap.YAMLMap { - constructor(schema) { - super(schema); - this.tag = YAMLSet.tag; - } - add(key) { - let pair; - if (identity.isPair(key)) - pair = key; - else if (key && - typeof key === 'object' && - 'key' in key && - 'value' in key && - key.value === null) - pair = new Pair.Pair(key.key, null); - else - pair = new Pair.Pair(key, null); - const prev = YAMLMap.findPair(this.items, pair.key); - if (!prev) - this.items.push(pair); - } - /** - * If `keepPair` is `true`, returns the Pair matching `key`. - * Otherwise, returns the value of that Pair's key. - */ - get(key, keepPair) { - const pair = YAMLMap.findPair(this.items, key); - return !keepPair && identity.isPair(pair) - ? identity.isScalar(pair.key) - ? 
pair.key.value - : pair.key - : pair; - } - set(key, value) { - if (typeof value !== 'boolean') - throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`); - const prev = YAMLMap.findPair(this.items, key); - if (prev && !value) { - this.items.splice(this.items.indexOf(prev), 1); - } - else if (!prev && value) { - this.items.push(new Pair.Pair(key)); - } - } - toJSON(_, ctx) { - return super.toJSON(_, ctx, Set); - } - toString(ctx, onComment, onChompKeep) { - if (!ctx) - return JSON.stringify(this); - if (this.hasAllNullValues(true)) - return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep); - else - throw new Error('Set items must all have null values'); - } - static from(schema, iterable, ctx) { - const { replacer } = ctx; - const set = new this(schema); - if (iterable && Symbol.iterator in Object(iterable)) - for (let value of iterable) { - if (typeof replacer === 'function') - value = replacer.call(iterable, value, value); - set.items.push(Pair.createPair(value, null, ctx)); - } - return set; - } -} -YAMLSet.tag = 'tag:yaml.org,2002:set'; -const set = { - collection: 'map', - identify: value => value instanceof Set, - nodeClass: YAMLSet, - default: false, - tag: 'tag:yaml.org,2002:set', - createNode: (schema, iterable, ctx) => YAMLSet.from(schema, iterable, ctx), - resolve(map, onError) { - if (identity.isMap(map)) { - if (map.hasAllNullValues(true)) - return Object.assign(new YAMLSet(), map); - else - onError('Set items must all have null values'); - } - else - onError('Expected a mapping for this tag'); - return map; - } -}; - -exports.YAMLSet = YAMLSet; -exports.set = set; diff --git a/bin/node_modules/yaml/dist/schema/yaml-1.1/timestamp.d.ts b/bin/node_modules/yaml/dist/schema/yaml-1.1/timestamp.d.ts deleted file mode 100644 index 0c1d2d4..0000000 --- a/bin/node_modules/yaml/dist/schema/yaml-1.1/timestamp.d.ts +++ /dev/null @@ -1,6 +0,0 @@ -import type { ScalarTag } from '../types.js'; -export declare const intTime: ScalarTag; -export declare const floatTime: ScalarTag; -export declare const timestamp: ScalarTag & { - test: RegExp; -}; diff --git a/bin/node_modules/yaml/dist/schema/yaml-1.1/timestamp.js b/bin/node_modules/yaml/dist/schema/yaml-1.1/timestamp.js deleted file mode 100644 index c0b29e8..0000000 --- a/bin/node_modules/yaml/dist/schema/yaml-1.1/timestamp.js +++ /dev/null @@ -1,105 +0,0 @@ -'use strict'; - -var stringifyNumber = require('../../stringify/stringifyNumber.js'); - -/** Internal types handle bigint as number, because TS can't figure it out. */ -function parseSexagesimal(str, asBigInt) { - const sign = str[0]; - const parts = sign === '-' || sign === '+' ? str.substring(1) : str; - const num = (n) => asBigInt ? BigInt(n) : Number(n); - const res = parts - .replace(/_/g, '') - .split(':') - .reduce((res, p) => res * num(60) + num(p), num(0)); - return (sign === '-' ? num(-1) * res : res); -} -/** - * hhhh:mm:ss.sss - * - * Internal types handle bigint as number, because TS can't figure it out. 
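// Sketch of the sexagesimal (base-60) integers handled above: with the
// YAML 1.1 schema, `1:30:00` resolves to 1*3600 + 30*60 + 0 = 5400.
const YAML = require('yaml')

const { duration } = YAML.parse('duration: 1:30:00', { schema: 'yaml-1.1' })
// duration === 5400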
- */ -function stringifySexagesimal(node) { - let { value } = node; - let num = (n) => n; - if (typeof value === 'bigint') - num = n => BigInt(n); - else if (isNaN(value) || !isFinite(value)) - return stringifyNumber.stringifyNumber(node); - let sign = ''; - if (value < 0) { - sign = '-'; - value *= num(-1); - } - const _60 = num(60); - const parts = [value % _60]; // seconds, including ms - if (value < 60) { - parts.unshift(0); // at least one : is required - } - else { - value = (value - parts[0]) / _60; - parts.unshift(value % _60); // minutes - if (value >= 60) { - value = (value - parts[0]) / _60; - parts.unshift(value); // hours - } - } - return (sign + - parts - .map(n => String(n).padStart(2, '0')) - .join(':') - .replace(/000000\d*$/, '') // % 60 may introduce error - ); -} -const intTime = { - identify: value => typeof value === 'bigint' || Number.isInteger(value), - default: true, - tag: 'tag:yaml.org,2002:int', - format: 'TIME', - test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/, - resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt), - stringify: stringifySexagesimal -}; -const floatTime = { - identify: value => typeof value === 'number', - default: true, - tag: 'tag:yaml.org,2002:float', - format: 'TIME', - test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*$/, - resolve: str => parseSexagesimal(str, false), - stringify: stringifySexagesimal -}; -const timestamp = { - identify: value => value instanceof Date, - default: true, - tag: 'tag:yaml.org,2002:timestamp', - // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part - // may be omitted altogether, resulting in a date format. In such a case, the time part is - // assumed to be 00:00:00Z (start of day, UTC). - test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd - '(?:' + // time is optional - '(?:t|T|[ \\t]+)' + // t | T | whitespace - '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)? - '(?:[ \\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30 - ')?$'), - resolve(str) { - const match = str.match(timestamp.test); - if (!match) - throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd'); - const [, year, month, day, hour, minute, second] = match.map(Number); - const millisec = match[7] ? 
Number((match[7] + '00').substr(1, 3)) : 0; - let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec); - const tz = match[8]; - if (tz && tz !== 'Z') { - let d = parseSexagesimal(tz, false); - if (Math.abs(d) < 30) - d *= 60; - date -= 60000 * d; - } - return new Date(date); - }, - stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\.000Z$/, '') -}; - -exports.floatTime = floatTime; -exports.intTime = intTime; -exports.timestamp = timestamp; diff --git a/bin/node_modules/yaml/dist/stringify/foldFlowLines.d.ts b/bin/node_modules/yaml/dist/stringify/foldFlowLines.d.ts deleted file mode 100644 index aac3cac..0000000 --- a/bin/node_modules/yaml/dist/stringify/foldFlowLines.d.ts +++ /dev/null @@ -1,34 +0,0 @@ -export declare const FOLD_FLOW = "flow"; -export declare const FOLD_BLOCK = "block"; -export declare const FOLD_QUOTED = "quoted"; -/** - * `'block'` prevents more-indented lines from being folded; - * `'quoted'` allows for `\` escapes, including escaped newlines - */ -export type FoldMode = 'flow' | 'block' | 'quoted'; -export interface FoldOptions { - /** - * Accounts for leading contents on the first line, defaulting to - * `indent.length` - */ - indentAtStart?: number; - /** Default: `80` */ - lineWidth?: number; - /** - * Allow highly indented lines to stretch the line width or indent content - * from the start. - * - * Default: `20` - */ - minContentWidth?: number; - /** Called once if the text is folded */ - onFold?: () => void; - /** Called once if any line of text exceeds lineWidth characters */ - onOverflow?: () => void; -} -/** - * Tries to keep input at up to `lineWidth` characters, splitting only on spaces - * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are - * terminated with `\n` and started with `indent`. - */ -export declare function foldFlowLines(text: string, indent: string, mode?: FoldMode, { indentAtStart, lineWidth, minContentWidth, onFold, onOverflow }?: FoldOptions): string; diff --git a/bin/node_modules/yaml/dist/stringify/foldFlowLines.js b/bin/node_modules/yaml/dist/stringify/foldFlowLines.js deleted file mode 100644 index 829a560..0000000 --- a/bin/node_modules/yaml/dist/stringify/foldFlowLines.js +++ /dev/null @@ -1,149 +0,0 @@ -'use strict'; - -const FOLD_FLOW = 'flow'; -const FOLD_BLOCK = 'block'; -const FOLD_QUOTED = 'quoted'; -/** - * Tries to keep input at up to `lineWidth` characters, splitting only on spaces - * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are - * terminated with `\n` and started with `indent`. 
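// Rough sketch of the line folding described above: long scalars are wrapped
// near `lineWidth` columns when stringified (exact break points depend on the
// content); assumes the published `yaml` package.
const YAML = require('yaml')

const text = 'one two three four five six seven eight nine ten eleven twelve'
console.log(YAML.stringify({ text }, { lineWidth: 30 }))
// The value is emitted as a folded scalar spanning several indented lines.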
- */ -function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) { - if (!lineWidth || lineWidth < 0) - return text; - const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length); - if (text.length <= endStep) - return text; - const folds = []; - const escapedFolds = {}; - let end = lineWidth - indent.length; - if (typeof indentAtStart === 'number') { - if (indentAtStart > lineWidth - Math.max(2, minContentWidth)) - folds.push(0); - else - end = lineWidth - indentAtStart; - } - let split = undefined; - let prev = undefined; - let overflow = false; - let i = -1; - let escStart = -1; - let escEnd = -1; - if (mode === FOLD_BLOCK) { - i = consumeMoreIndentedLines(text, i, indent.length); - if (i !== -1) - end = i + endStep; - } - for (let ch; (ch = text[(i += 1)]);) { - if (mode === FOLD_QUOTED && ch === '\\') { - escStart = i; - switch (text[i + 1]) { - case 'x': - i += 3; - break; - case 'u': - i += 5; - break; - case 'U': - i += 9; - break; - default: - i += 1; - } - escEnd = i; - } - if (ch === '\n') { - if (mode === FOLD_BLOCK) - i = consumeMoreIndentedLines(text, i, indent.length); - end = i + indent.length + endStep; - split = undefined; - } - else { - if (ch === ' ' && - prev && - prev !== ' ' && - prev !== '\n' && - prev !== '\t') { - // space surrounded by non-space can be replaced with newline + indent - const next = text[i + 1]; - if (next && next !== ' ' && next !== '\n' && next !== '\t') - split = i; - } - if (i >= end) { - if (split) { - folds.push(split); - end = split + endStep; - split = undefined; - } - else if (mode === FOLD_QUOTED) { - // white-space collected at end may stretch past lineWidth - while (prev === ' ' || prev === '\t') { - prev = ch; - ch = text[(i += 1)]; - overflow = true; - } - // Account for newline escape, but don't break preceding escape - const j = i > escEnd + 1 ? 
i - 2 : escStart - 1; - // Bail out if lineWidth & minContentWidth are shorter than an escape string - if (escapedFolds[j]) - return text; - folds.push(j); - escapedFolds[j] = true; - end = j + endStep; - split = undefined; - } - else { - overflow = true; - } - } - } - prev = ch; - } - if (overflow && onOverflow) - onOverflow(); - if (folds.length === 0) - return text; - if (onFold) - onFold(); - let res = text.slice(0, folds[0]); - for (let i = 0; i < folds.length; ++i) { - const fold = folds[i]; - const end = folds[i + 1] || text.length; - if (fold === 0) - res = `\n${indent}${text.slice(0, end)}`; - else { - if (mode === FOLD_QUOTED && escapedFolds[fold]) - res += `${text[fold]}\\`; - res += `\n${indent}${text.slice(fold + 1, end)}`; - } - } - return res; -} -/** - * Presumes `i + 1` is at the start of a line - * @returns index of last newline in more-indented block - */ -function consumeMoreIndentedLines(text, i, indent) { - let end = i; - let start = i + 1; - let ch = text[start]; - while (ch === ' ' || ch === '\t') { - if (i < start + indent) { - ch = text[++i]; - } - else { - do { - ch = text[++i]; - } while (ch && ch !== '\n'); - end = i; - start = i + 1; - ch = text[start]; - } - } - return end; -} - -exports.FOLD_BLOCK = FOLD_BLOCK; -exports.FOLD_FLOW = FOLD_FLOW; -exports.FOLD_QUOTED = FOLD_QUOTED; -exports.foldFlowLines = foldFlowLines; diff --git a/bin/node_modules/yaml/dist/stringify/stringify.d.ts b/bin/node_modules/yaml/dist/stringify/stringify.d.ts deleted file mode 100644 index f408b75..0000000 --- a/bin/node_modules/yaml/dist/stringify/stringify.d.ts +++ /dev/null @@ -1,21 +0,0 @@ -import type { Document } from '../doc/Document.js'; -import type { Alias } from '../nodes/Alias.js'; -import type { ToStringOptions } from '../options.js'; -export type StringifyContext = { - actualString?: boolean; - allNullValues?: boolean; - anchors: Set; - doc: Document; - forceBlockIndent?: boolean; - implicitKey?: boolean; - indent: string; - indentStep: string; - indentAtStart?: number; - inFlow: boolean | null; - inStringifyKey?: boolean; - flowCollectionPadding: string; - options: Readonly>>; - resolvedAliases?: Set; -}; -export declare function createStringifyContext(doc: Document, options: ToStringOptions): StringifyContext; -export declare function stringify(item: unknown, ctx: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string; diff --git a/bin/node_modules/yaml/dist/stringify/stringify.js b/bin/node_modules/yaml/dist/stringify/stringify.js deleted file mode 100644 index 1b472a6..0000000 --- a/bin/node_modules/yaml/dist/stringify/stringify.js +++ /dev/null @@ -1,127 +0,0 @@ -'use strict'; - -var anchors = require('../doc/anchors.js'); -var identity = require('../nodes/identity.js'); -var stringifyComment = require('./stringifyComment.js'); -var stringifyString = require('./stringifyString.js'); - -function createStringifyContext(doc, options) { - const opt = Object.assign({ - blockQuote: true, - commentString: stringifyComment.stringifyComment, - defaultKeyType: null, - defaultStringType: 'PLAIN', - directives: null, - doubleQuotedAsJSON: false, - doubleQuotedMinMultiLineLength: 40, - falseStr: 'false', - flowCollectionPadding: true, - indentSeq: true, - lineWidth: 80, - minContentWidth: 20, - nullStr: 'null', - simpleKeys: false, - singleQuote: null, - trueStr: 'true', - verifyAliasOrder: true - }, doc.schema.toStringOptions, options); - let inFlow; - switch (opt.collectionStyle) { - case 'block': - inFlow = false; - break; - case 'flow': - inFlow = true; 
- break; - default: - inFlow = null; - } - return { - anchors: new Set(), - doc, - flowCollectionPadding: opt.flowCollectionPadding ? ' ' : '', - indent: '', - indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ', - inFlow, - options: opt - }; -} -function getTagObject(tags, item) { - if (item.tag) { - const match = tags.filter(t => t.tag === item.tag); - if (match.length > 0) - return match.find(t => t.format === item.format) ?? match[0]; - } - let tagObj = undefined; - let obj; - if (identity.isScalar(item)) { - obj = item.value; - const match = tags.filter(t => t.identify?.(obj)); - tagObj = - match.find(t => t.format === item.format) ?? match.find(t => !t.format); - } - else { - obj = item; - tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass); - } - if (!tagObj) { - const name = obj?.constructor?.name ?? typeof obj; - throw new Error(`Tag not resolved for ${name} value`); - } - return tagObj; -} -// needs to be called before value stringifier to allow for circular anchor refs -function stringifyProps(node, tagObj, { anchors: anchors$1, doc }) { - if (!doc.directives) - return ''; - const props = []; - const anchor = (identity.isScalar(node) || identity.isCollection(node)) && node.anchor; - if (anchor && anchors.anchorIsValid(anchor)) { - anchors$1.add(anchor); - props.push(`&${anchor}`); - } - const tag = node.tag ? node.tag : tagObj.default ? null : tagObj.tag; - if (tag) - props.push(doc.directives.tagString(tag)); - return props.join(' '); -} -function stringify(item, ctx, onComment, onChompKeep) { - if (identity.isPair(item)) - return item.toString(ctx, onComment, onChompKeep); - if (identity.isAlias(item)) { - if (ctx.doc.directives) - return item.toString(ctx); - if (ctx.resolvedAliases?.has(item)) { - throw new TypeError(`Cannot stringify circular structure without alias nodes`); - } - else { - if (ctx.resolvedAliases) - ctx.resolvedAliases.add(item); - else - ctx.resolvedAliases = new Set([item]); - item = item.resolve(ctx.doc); - } - } - let tagObj = undefined; - const node = identity.isNode(item) - ? item - : ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) }); - if (!tagObj) - tagObj = getTagObject(ctx.doc.schema.tags, node); - const props = stringifyProps(node, tagObj, ctx); - if (props.length > 0) - ctx.indentAtStart = (ctx.indentAtStart ?? 0) + props.length + 1; - const str = typeof tagObj.stringify === 'function' - ? tagObj.stringify(node, ctx, onComment, onChompKeep) - : identity.isScalar(node) - ? stringifyString.stringifyString(node, ctx, onComment, onChompKeep) - : node.toString(ctx, onComment, onChompKeep); - if (!props) - return str; - return identity.isScalar(node) || str[0] === '{' || str[0] === '[' - ? 
`${props} ${str}` - : `${props}\n${ctx.indent}${str}`; -} - -exports.createStringifyContext = createStringifyContext; -exports.stringify = stringify; diff --git a/bin/node_modules/yaml/dist/stringify/stringifyCollection.d.ts b/bin/node_modules/yaml/dist/stringify/stringifyCollection.d.ts deleted file mode 100644 index 207d703..0000000 --- a/bin/node_modules/yaml/dist/stringify/stringifyCollection.d.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Collection } from '../nodes/Collection.js'; -import { StringifyContext } from './stringify.js'; -interface StringifyCollectionOptions { - blockItemPrefix: string; - flowChars: { - start: '{'; - end: '}'; - } | { - start: '['; - end: ']'; - }; - itemIndent: string; - onChompKeep?: () => void; - onComment?: () => void; -} -export declare function stringifyCollection(collection: Readonly, ctx: StringifyContext, options: StringifyCollectionOptions): string; -export {}; diff --git a/bin/node_modules/yaml/dist/stringify/stringifyCollection.js b/bin/node_modules/yaml/dist/stringify/stringifyCollection.js deleted file mode 100644 index 6efffc5..0000000 --- a/bin/node_modules/yaml/dist/stringify/stringifyCollection.js +++ /dev/null @@ -1,145 +0,0 @@ -'use strict'; - -var identity = require('../nodes/identity.js'); -var stringify = require('./stringify.js'); -var stringifyComment = require('./stringifyComment.js'); - -function stringifyCollection(collection, ctx, options) { - const flow = ctx.inFlow ?? collection.flow; - const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection; - return stringify(collection, ctx, options); -} -function stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) { - const { indent, options: { commentString } } = ctx; - const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null }); - let chompKeep = false; // flag for the preceding node's status - const lines = []; - for (let i = 0; i < items.length; ++i) { - const item = items[i]; - let comment = null; - if (identity.isNode(item)) { - if (!chompKeep && item.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, item.commentBefore, chompKeep); - if (item.comment) - comment = item.comment; - } - else if (identity.isPair(item)) { - const ik = identity.isNode(item.key) ? item.key : null; - if (ik) { - if (!chompKeep && ik.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, ik.commentBefore, chompKeep); - } - } - chompKeep = false; - let str = stringify.stringify(item, itemCtx, () => (comment = null), () => (chompKeep = true)); - if (comment) - str += stringifyComment.lineComment(str, itemIndent, commentString(comment)); - if (chompKeep && comment) - chompKeep = false; - lines.push(blockItemPrefix + str); - } - let str; - if (lines.length === 0) { - str = flowChars.start + flowChars.end; - } - else { - str = lines[0]; - for (let i = 1; i < lines.length; ++i) { - const line = lines[i]; - str += line ? 
`\n${indent}${line}` : '\n'; - } - } - if (comment) { - str += '\n' + stringifyComment.indentComment(commentString(comment), indent); - if (onComment) - onComment(); - } - else if (chompKeep && onChompKeep) - onChompKeep(); - return str; -} -function stringifyFlowCollection({ items }, ctx, { flowChars, itemIndent }) { - const { indent, indentStep, flowCollectionPadding: fcPadding, options: { commentString } } = ctx; - itemIndent += indentStep; - const itemCtx = Object.assign({}, ctx, { - indent: itemIndent, - inFlow: true, - type: null - }); - let reqNewline = false; - let linesAtValue = 0; - const lines = []; - for (let i = 0; i < items.length; ++i) { - const item = items[i]; - let comment = null; - if (identity.isNode(item)) { - if (item.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, item.commentBefore, false); - if (item.comment) - comment = item.comment; - } - else if (identity.isPair(item)) { - const ik = identity.isNode(item.key) ? item.key : null; - if (ik) { - if (ik.spaceBefore) - lines.push(''); - addCommentBefore(ctx, lines, ik.commentBefore, false); - if (ik.comment) - reqNewline = true; - } - const iv = identity.isNode(item.value) ? item.value : null; - if (iv) { - if (iv.comment) - comment = iv.comment; - if (iv.commentBefore) - reqNewline = true; - } - else if (item.value == null && ik?.comment) { - comment = ik.comment; - } - } - if (comment) - reqNewline = true; - let str = stringify.stringify(item, itemCtx, () => (comment = null)); - if (i < items.length - 1) - str += ','; - if (comment) - str += stringifyComment.lineComment(str, itemIndent, commentString(comment)); - if (!reqNewline && (lines.length > linesAtValue || str.includes('\n'))) - reqNewline = true; - lines.push(str); - linesAtValue = lines.length; - } - const { start, end } = flowChars; - if (lines.length === 0) { - return start + end; - } - else { - if (!reqNewline) { - const len = lines.reduce((sum, line) => sum + line.length + 2, 2); - reqNewline = ctx.options.lineWidth > 0 && len > ctx.options.lineWidth; - } - if (reqNewline) { - let str = start; - for (const line of lines) - str += line ? `\n${indentStep}${indent}${line}` : '\n'; - return `${str}\n${indent}${end}`; - } - else { - return `${start}${fcPadding}${lines.join(' ')}${fcPadding}${end}`; - } - } -} -function addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) { - if (comment && chompKeep) - comment = comment.replace(/^\n+/, ''); - if (comment) { - const ic = stringifyComment.indentComment(commentString(comment), indent); - lines.push(ic.trimStart()); // Avoid double indent on first line - } -} - -exports.stringifyCollection = stringifyCollection; diff --git a/bin/node_modules/yaml/dist/stringify/stringifyComment.d.ts b/bin/node_modules/yaml/dist/stringify/stringifyComment.d.ts deleted file mode 100644 index 9fcf48d..0000000 --- a/bin/node_modules/yaml/dist/stringify/stringifyComment.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -/** - * Stringifies a comment. - * - * Empty comment lines are left empty, - * lines consisting of a single space are replaced by `#`, - * and all other lines are prefixed with a `#`. 
- */ -export declare const stringifyComment: (str: string) => string; -export declare function indentComment(comment: string, indent: string): string; -export declare const lineComment: (str: string, indent: string, comment: string) => string; diff --git a/bin/node_modules/yaml/dist/stringify/stringifyComment.js b/bin/node_modules/yaml/dist/stringify/stringifyComment.js deleted file mode 100644 index 26bf361..0000000 --- a/bin/node_modules/yaml/dist/stringify/stringifyComment.js +++ /dev/null @@ -1,24 +0,0 @@ -'use strict'; - -/** - * Stringifies a comment. - * - * Empty comment lines are left empty, - * lines consisting of a single space are replaced by `#`, - * and all other lines are prefixed with a `#`. - */ -const stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#'); -function indentComment(comment, indent) { - if (/^\n+$/.test(comment)) - return comment.substring(1); - return indent ? comment.replace(/^(?! *$)/gm, indent) : comment; -} -const lineComment = (str, indent, comment) => str.endsWith('\n') - ? indentComment(comment, indent) - : comment.includes('\n') - ? '\n' + indentComment(comment, indent) - : (str.endsWith(' ') ? '' : ' ') + comment; - -exports.indentComment = indentComment; -exports.lineComment = lineComment; -exports.stringifyComment = stringifyComment; diff --git a/bin/node_modules/yaml/dist/stringify/stringifyDocument.d.ts b/bin/node_modules/yaml/dist/stringify/stringifyDocument.d.ts deleted file mode 100644 index 1eeb177..0000000 --- a/bin/node_modules/yaml/dist/stringify/stringifyDocument.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { Document } from '../doc/Document.js'; -import type { Node } from '../nodes/Node.js'; -import type { ToStringOptions } from '../options.js'; -export declare function stringifyDocument(doc: Readonly>, options: ToStringOptions): string; diff --git a/bin/node_modules/yaml/dist/stringify/stringifyDocument.js b/bin/node_modules/yaml/dist/stringify/stringifyDocument.js deleted file mode 100644 index fb9d73c..0000000 --- a/bin/node_modules/yaml/dist/stringify/stringifyDocument.js +++ /dev/null @@ -1,87 +0,0 @@ -'use strict'; - -var identity = require('../nodes/identity.js'); -var stringify = require('./stringify.js'); -var stringifyComment = require('./stringifyComment.js'); - -function stringifyDocument(doc, options) { - const lines = []; - let hasDirectives = options.directives === true; - if (options.directives !== false && doc.directives) { - const dir = doc.directives.toString(doc); - if (dir) { - lines.push(dir); - hasDirectives = true; - } - else if (doc.directives.docStart) - hasDirectives = true; - } - if (hasDirectives) - lines.push('---'); - const ctx = stringify.createStringifyContext(doc, options); - const { commentString } = ctx.options; - if (doc.commentBefore) { - if (lines.length !== 1) - lines.unshift(''); - const cs = commentString(doc.commentBefore); - lines.unshift(stringifyComment.indentComment(cs, '')); - } - let chompKeep = false; - let contentComment = null; - if (doc.contents) { - if (identity.isNode(doc.contents)) { - if (doc.contents.spaceBefore && hasDirectives) - lines.push(''); - if (doc.contents.commentBefore) { - const cs = commentString(doc.contents.commentBefore); - lines.push(stringifyComment.indentComment(cs, '')); - } - // top-level block scalars need to be indented if followed by a comment - ctx.forceBlockIndent = !!doc.comment; - contentComment = doc.contents.comment; - } - const onChompKeep = contentComment ? 
undefined : () => (chompKeep = true); - let body = stringify.stringify(doc.contents, ctx, () => (contentComment = null), onChompKeep); - if (contentComment) - body += stringifyComment.lineComment(body, '', commentString(contentComment)); - if ((body[0] === '|' || body[0] === '>') && - lines[lines.length - 1] === '---') { - // Top-level block scalars with a preceding doc marker ought to use the - // same line for their header. - lines[lines.length - 1] = `--- ${body}`; - } - else - lines.push(body); - } - else { - lines.push(stringify.stringify(doc.contents, ctx)); - } - if (doc.directives?.docEnd) { - if (doc.comment) { - const cs = commentString(doc.comment); - if (cs.includes('\n')) { - lines.push('...'); - lines.push(stringifyComment.indentComment(cs, '')); - } - else { - lines.push(`... ${cs}`); - } - } - else { - lines.push('...'); - } - } - else { - let dc = doc.comment; - if (dc && chompKeep) - dc = dc.replace(/^\n+/, ''); - if (dc) { - if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '') - lines.push(''); - lines.push(stringifyComment.indentComment(commentString(dc), '')); - } - } - return lines.join('\n') + '\n'; -} - -exports.stringifyDocument = stringifyDocument; diff --git a/bin/node_modules/yaml/dist/stringify/stringifyNumber.d.ts b/bin/node_modules/yaml/dist/stringify/stringifyNumber.d.ts deleted file mode 100644 index 3c14df1..0000000 --- a/bin/node_modules/yaml/dist/stringify/stringifyNumber.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import type { Scalar } from '../nodes/Scalar.js'; -export declare function stringifyNumber({ format, minFractionDigits, tag, value }: Scalar): string; diff --git a/bin/node_modules/yaml/dist/stringify/stringifyNumber.js b/bin/node_modules/yaml/dist/stringify/stringifyNumber.js deleted file mode 100644 index 4118ff6..0000000 --- a/bin/node_modules/yaml/dist/stringify/stringifyNumber.js +++ /dev/null @@ -1,26 +0,0 @@ -'use strict'; - -function stringifyNumber({ format, minFractionDigits, tag, value }) { - if (typeof value === 'bigint') - return String(value); - const num = typeof value === 'number' ? value : Number(value); - if (!isFinite(num)) - return isNaN(num) ? '.nan' : num < 0 ? 
'-.inf' : '.inf'; - let n = JSON.stringify(value); - if (!format && - minFractionDigits && - (!tag || tag === 'tag:yaml.org,2002:float') && - /^\d/.test(n)) { - let i = n.indexOf('.'); - if (i < 0) { - i = n.length; - n += '.'; - } - let d = minFractionDigits - (n.length - i - 1); - while (d-- > 0) - n += '0'; - } - return n; -} - -exports.stringifyNumber = stringifyNumber; diff --git a/bin/node_modules/yaml/dist/stringify/stringifyPair.d.ts b/bin/node_modules/yaml/dist/stringify/stringifyPair.d.ts deleted file mode 100644 index c512149..0000000 --- a/bin/node_modules/yaml/dist/stringify/stringifyPair.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -import type { Pair } from '../nodes/Pair.js'; -import { StringifyContext } from './stringify.js'; -export declare function stringifyPair({ key, value }: Readonly, ctx: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string; diff --git a/bin/node_modules/yaml/dist/stringify/stringifyPair.js b/bin/node_modules/yaml/dist/stringify/stringifyPair.js deleted file mode 100644 index 716ea9a..0000000 --- a/bin/node_modules/yaml/dist/stringify/stringifyPair.js +++ /dev/null @@ -1,152 +0,0 @@ -'use strict'; - -var identity = require('../nodes/identity.js'); -var Scalar = require('../nodes/Scalar.js'); -var stringify = require('./stringify.js'); -var stringifyComment = require('./stringifyComment.js'); - -function stringifyPair({ key, value }, ctx, onComment, onChompKeep) { - const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx; - let keyComment = (identity.isNode(key) && key.comment) || null; - if (simpleKeys) { - if (keyComment) { - throw new Error('With simple keys, key nodes cannot have comments'); - } - if (identity.isCollection(key) || (!identity.isNode(key) && typeof key === 'object')) { - const msg = 'With simple keys, collection cannot be used as a key value'; - throw new Error(msg); - } - } - let explicitKey = !simpleKeys && - (!key || - (keyComment && value == null && !ctx.inFlow) || - identity.isCollection(key) || - (identity.isScalar(key) - ? key.type === Scalar.Scalar.BLOCK_FOLDED || key.type === Scalar.Scalar.BLOCK_LITERAL - : typeof key === 'object')); - ctx = Object.assign({}, ctx, { - allNullValues: false, - implicitKey: !explicitKey && (simpleKeys || !allNullValues), - indent: indent + indentStep - }); - let keyCommentDone = false; - let chompKeep = false; - let str = stringify.stringify(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true)); - if (!explicitKey && !ctx.inFlow && str.length > 1024) { - if (simpleKeys) - throw new Error('With simple keys, single line scalar must not span more than 1024 characters'); - explicitKey = true; - } - if (ctx.inFlow) { - if (allNullValues || value == null) { - if (keyCommentDone && onComment) - onComment(); - return str === '' ? '?' : explicitKey ? `? ${str}` : str; - } - } - else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) { - str = `? ${str}`; - if (keyComment && !keyCommentDone) { - str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment)); - } - else if (chompKeep && onChompKeep) - onChompKeep(); - return str; - } - if (keyCommentDone) - keyComment = null; - if (explicitKey) { - if (keyComment) - str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment)); - str = `? 
${str}\n${indent}:`; - } - else { - str = `${str}:`; - if (keyComment) - str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment)); - } - let vsb, vcb, valueComment; - if (identity.isNode(value)) { - vsb = !!value.spaceBefore; - vcb = value.commentBefore; - valueComment = value.comment; - } - else { - vsb = false; - vcb = null; - valueComment = null; - if (value && typeof value === 'object') - value = doc.createNode(value); - } - ctx.implicitKey = false; - if (!explicitKey && !keyComment && identity.isScalar(value)) - ctx.indentAtStart = str.length + 1; - chompKeep = false; - if (!indentSeq && - indentStep.length >= 2 && - !ctx.inFlow && - !explicitKey && - identity.isSeq(value) && - !value.flow && - !value.tag && - !value.anchor) { - // If indentSeq === false, consider '- ' as part of indentation where possible - ctx.indent = ctx.indent.substring(2); - } - let valueCommentDone = false; - const valueStr = stringify.stringify(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true)); - let ws = ' '; - if (keyComment || vsb || vcb) { - ws = vsb ? '\n' : ''; - if (vcb) { - const cs = commentString(vcb); - ws += `\n${stringifyComment.indentComment(cs, ctx.indent)}`; - } - if (valueStr === '' && !ctx.inFlow) { - if (ws === '\n') - ws = '\n\n'; - } - else { - ws += `\n${ctx.indent}`; - } - } - else if (!explicitKey && identity.isCollection(value)) { - const vs0 = valueStr[0]; - const nl0 = valueStr.indexOf('\n'); - const hasNewline = nl0 !== -1; - const flow = ctx.inFlow ?? value.flow ?? value.items.length === 0; - if (hasNewline || !flow) { - let hasPropsLine = false; - if (hasNewline && (vs0 === '&' || vs0 === '!')) { - let sp0 = valueStr.indexOf(' '); - if (vs0 === '&' && - sp0 !== -1 && - sp0 < nl0 && - valueStr[sp0 + 1] === '!') { - sp0 = valueStr.indexOf(' ', sp0 + 1); - } - if (sp0 === -1 || nl0 < sp0) - hasPropsLine = true; - } - if (!hasPropsLine) - ws = `\n${ctx.indent}`; - } - } - else if (valueStr === '' || valueStr[0] === '\n') { - ws = ''; - } - str += ws + valueStr; - if (ctx.inFlow) { - if (valueCommentDone && onComment) - onComment(); - } - else if (valueComment && !valueCommentDone) { - str += stringifyComment.lineComment(str, ctx.indent, commentString(valueComment)); - } - else if (chompKeep && onChompKeep) { - onChompKeep(); - } - return str; -} - -exports.stringifyPair = stringifyPair; diff --git a/bin/node_modules/yaml/dist/stringify/stringifyString.d.ts b/bin/node_modules/yaml/dist/stringify/stringifyString.d.ts deleted file mode 100644 index 017cc4e..0000000 --- a/bin/node_modules/yaml/dist/stringify/stringifyString.d.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { Scalar } from '../nodes/Scalar.js'; -import type { StringifyContext } from './stringify.js'; -interface StringifyScalar { - value: string; - comment?: string | null; - type?: string; -} -export declare function stringifyString(item: Scalar | StringifyScalar, ctx: StringifyContext, onComment?: () => void, onChompKeep?: () => void): string; -export {}; diff --git a/bin/node_modules/yaml/dist/stringify/stringifyString.js b/bin/node_modules/yaml/dist/stringify/stringifyString.js deleted file mode 100644 index 339e331..0000000 --- a/bin/node_modules/yaml/dist/stringify/stringifyString.js +++ /dev/null @@ -1,330 +0,0 @@ -'use strict'; - -var Scalar = require('../nodes/Scalar.js'); -var foldFlowLines = require('./foldFlowLines.js'); - -const getFoldOptions = (ctx, isBlock) => ({ - indentAtStart: isBlock ? 
ctx.indent.length : ctx.indentAtStart, - lineWidth: ctx.options.lineWidth, - minContentWidth: ctx.options.minContentWidth -}); -// Also checks for lines starting with %, as parsing the output as YAML 1.1 will -// presume that's starting a new document. -const containsDocumentMarker = (str) => /^(%|---|\.\.\.)/m.test(str); -function lineLengthOverLimit(str, lineWidth, indentLength) { - if (!lineWidth || lineWidth < 0) - return false; - const limit = lineWidth - indentLength; - const strLen = str.length; - if (strLen <= limit) - return false; - for (let i = 0, start = 0; i < strLen; ++i) { - if (str[i] === '\n') { - if (i - start > limit) - return true; - start = i + 1; - if (strLen - start <= limit) - return false; - } - } - return true; -} -function doubleQuotedString(value, ctx) { - const json = JSON.stringify(value); - if (ctx.options.doubleQuotedAsJSON) - return json; - const { implicitKey } = ctx; - const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength; - const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : ''); - let str = ''; - let start = 0; - for (let i = 0, ch = json[i]; ch; ch = json[++i]) { - if (ch === ' ' && json[i + 1] === '\\' && json[i + 2] === 'n') { - // space before newline needs to be escaped to not be folded - str += json.slice(start, i) + '\\ '; - i += 1; - start = i; - ch = '\\'; - } - if (ch === '\\') - switch (json[i + 1]) { - case 'u': - { - str += json.slice(start, i); - const code = json.substr(i + 2, 4); - switch (code) { - case '0000': - str += '\\0'; - break; - case '0007': - str += '\\a'; - break; - case '000b': - str += '\\v'; - break; - case '001b': - str += '\\e'; - break; - case '0085': - str += '\\N'; - break; - case '00a0': - str += '\\_'; - break; - case '2028': - str += '\\L'; - break; - case '2029': - str += '\\P'; - break; - default: - if (code.substr(0, 2) === '00') - str += '\\x' + code.substr(2); - else - str += json.substr(i, 6); - } - i += 5; - start = i + 1; - } - break; - case 'n': - if (implicitKey || - json[i + 2] === '"' || - json.length < minMultiLineLength) { - i += 1; - } - else { - // folding will eat first newline - str += json.slice(start, i) + '\n\n'; - while (json[i + 2] === '\\' && - json[i + 3] === 'n' && - json[i + 4] !== '"') { - str += '\n'; - i += 2; - } - str += indent; - // space after newline needs to be escaped to not be folded - if (json[i + 2] === ' ') - str += '\\'; - i += 1; - start = i + 1; - } - break; - default: - i += 1; - } - } - str = start ? str + json.slice(start) : json; - return implicitKey - ? str - : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_QUOTED, getFoldOptions(ctx, false)); -} -function singleQuotedString(value, ctx) { - if (ctx.options.singleQuote === false || - (ctx.implicitKey && value.includes('\n')) || - /[ \t]\n|\n[ \t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline - ) - return doubleQuotedString(value, ctx); - const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : ''); - const res = "'" + value.replace(/'/g, "''").replace(/\n+/g, `$&\n${indent}`) + "'"; - return ctx.implicitKey - ? 
res - : foldFlowLines.foldFlowLines(res, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx, false)); -} -function quotedString(value, ctx) { - const { singleQuote } = ctx.options; - let qs; - if (singleQuote === false) - qs = doubleQuotedString; - else { - const hasDouble = value.includes('"'); - const hasSingle = value.includes("'"); - if (hasDouble && !hasSingle) - qs = singleQuotedString; - else if (hasSingle && !hasDouble) - qs = doubleQuotedString; - else - qs = singleQuote ? singleQuotedString : doubleQuotedString; - } - return qs(value, ctx); -} -// The negative lookbehind avoids a polynomial search, -// but isn't supported yet on Safari: https://caniuse.com/js-regexp-lookbehind -let blockEndNewlines; -try { - blockEndNewlines = new RegExp('(^|(?\n'; - // determine chomping from whitespace at value end - let chomp; - let endStart; - for (endStart = value.length; endStart > 0; --endStart) { - const ch = value[endStart - 1]; - if (ch !== '\n' && ch !== '\t' && ch !== ' ') - break; - } - let end = value.substring(endStart); - const endNlPos = end.indexOf('\n'); - if (endNlPos === -1) { - chomp = '-'; // strip - } - else if (value === end || endNlPos !== end.length - 1) { - chomp = '+'; // keep - if (onChompKeep) - onChompKeep(); - } - else { - chomp = ''; // clip - } - if (end) { - value = value.slice(0, -end.length); - if (end[end.length - 1] === '\n') - end = end.slice(0, -1); - end = end.replace(blockEndNewlines, `$&${indent}`); - } - // determine indent indicator from whitespace at value start - let startWithSpace = false; - let startEnd; - let startNlPos = -1; - for (startEnd = 0; startEnd < value.length; ++startEnd) { - const ch = value[startEnd]; - if (ch === ' ') - startWithSpace = true; - else if (ch === '\n') - startNlPos = startEnd; - else - break; - } - let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd); - if (start) { - value = value.substring(start.length); - start = start.replace(/\n+/g, `$&${indent}`); - } - const indentSize = indent ? '2' : '1'; // root is at -1 - let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp; - if (comment) { - header += ' ' + commentString(comment.replace(/ ?[\r\n]+/g, ' ')); - if (onComment) - onComment(); - } - if (literal) { - value = value.replace(/\n+/g, `$&${indent}`); - return `${header}\n${indent}${start}${value}${end}`; - } - value = value - .replace(/\n+/g, '\n$&') - .replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded - // ^ more-ind. ^ empty ^ capture next empty lines only at end of indent - .replace(/\n+/g, `$&${indent}`); - const body = foldFlowLines.foldFlowLines(`${start}${value}${end}`, indent, foldFlowLines.FOLD_BLOCK, getFoldOptions(ctx, true)); - return `${header}\n${indent}${body}`; -} -function plainString(item, ctx, onComment, onChompKeep) { - const { type, value } = item; - const { actualString, implicitKey, indent, indentStep, inFlow } = ctx; - if ((implicitKey && value.includes('\n')) || - (inFlow && /[[\]{},]/.test(value))) { - return quotedString(value, ctx); - } - if (!value || - /^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) { - // not allowed: - // - empty string, '-' or '?' - // - start with an indicator character (except [?:-]) or /[?-] / - // - '\n ', ': ' or ' \n' anywhere - // - '#' not preceded by a non-space char - // - end with ' ' or ':' - return implicitKey || inFlow || !value.includes('\n') - ? 
quotedString(value, ctx) - : blockString(item, ctx, onComment, onChompKeep); - } - if (!implicitKey && - !inFlow && - type !== Scalar.Scalar.PLAIN && - value.includes('\n')) { - // Where allowed & type not set explicitly, prefer block style for multiline strings - return blockString(item, ctx, onComment, onChompKeep); - } - if (containsDocumentMarker(value)) { - if (indent === '') { - ctx.forceBlockIndent = true; - return blockString(item, ctx, onComment, onChompKeep); - } - else if (implicitKey && indent === indentStep) { - return quotedString(value, ctx); - } - } - const str = value.replace(/\n+/g, `$&\n${indent}`); - // Verify that output will be parsed as a string, as e.g. plain numbers and - // booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'), - // and others in v1.1. - if (actualString) { - const test = (tag) => tag.default && tag.tag !== 'tag:yaml.org,2002:str' && tag.test?.test(str); - const { compat, tags } = ctx.doc.schema; - if (tags.some(test) || compat?.some(test)) - return quotedString(value, ctx); - } - return implicitKey - ? str - : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx, false)); -} -function stringifyString(item, ctx, onComment, onChompKeep) { - const { implicitKey, inFlow } = ctx; - const ss = typeof item.value === 'string' - ? item - : Object.assign({}, item, { value: String(item.value) }); - let { type } = item; - if (type !== Scalar.Scalar.QUOTE_DOUBLE) { - // force double quotes on control characters & unpaired surrogates - if (/[\x00-\x08\x0b-\x1f\x7f-\x9f\u{D800}-\u{DFFF}]/u.test(ss.value)) - type = Scalar.Scalar.QUOTE_DOUBLE; - } - const _stringify = (_type) => { - switch (_type) { - case Scalar.Scalar.BLOCK_FOLDED: - case Scalar.Scalar.BLOCK_LITERAL: - return implicitKey || inFlow - ? 
quotedString(ss.value, ctx) // blocks are not valid inside flow containers - : blockString(ss, ctx, onComment, onChompKeep); - case Scalar.Scalar.QUOTE_DOUBLE: - return doubleQuotedString(ss.value, ctx); - case Scalar.Scalar.QUOTE_SINGLE: - return singleQuotedString(ss.value, ctx); - case Scalar.Scalar.PLAIN: - return plainString(ss, ctx, onComment, onChompKeep); - default: - return null; - } - }; - let res = _stringify(type); - if (res === null) { - const { defaultKeyType, defaultStringType } = ctx.options; - const t = (implicitKey && defaultKeyType) || defaultStringType; - res = _stringify(t); - if (res === null) - throw new Error(`Unsupported default string type ${t}`); - } - return res; -} - -exports.stringifyString = stringifyString; diff --git a/bin/node_modules/yaml/dist/test-events.d.ts b/bin/node_modules/yaml/dist/test-events.d.ts deleted file mode 100644 index d1a2348..0000000 --- a/bin/node_modules/yaml/dist/test-events.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -export declare function testEvents(src: string): { - events: string[]; - error: unknown; -}; diff --git a/bin/node_modules/yaml/dist/test-events.js b/bin/node_modules/yaml/dist/test-events.js deleted file mode 100644 index f38d336..0000000 --- a/bin/node_modules/yaml/dist/test-events.js +++ /dev/null @@ -1,134 +0,0 @@ -'use strict'; - -var identity = require('./nodes/identity.js'); -var publicApi = require('./public-api.js'); -var visit = require('./visit.js'); - -const scalarChar = { - BLOCK_FOLDED: '>', - BLOCK_LITERAL: '|', - PLAIN: ':', - QUOTE_DOUBLE: '"', - QUOTE_SINGLE: "'" -}; -function anchorExists(doc, anchor) { - let found = false; - visit.visit(doc, { - Value(_key, node) { - if (node.anchor === anchor) { - found = true; - return visit.visit.BREAK; - } - } - }); - return found; -} -// test harness for yaml-test-suite event tests -function testEvents(src) { - const docs = publicApi.parseAllDocuments(src); - const errDoc = docs.find(doc => doc.errors.length > 0); - const error = errDoc ? errDoc.errors[0].message : null; - const events = ['+STR']; - try { - for (let i = 0; i < docs.length; ++i) { - const doc = docs[i]; - let root = doc.contents; - if (Array.isArray(root)) - root = root[0]; - const [rootStart] = doc.range || [0]; - const error = doc.errors[0]; - if (error && (!error.pos || error.pos[0] < rootStart)) - throw new Error(); - let docStart = '+DOC'; - if (doc.directives.docStart) - docStart += ' ---'; - else if (doc.contents && - doc.contents.range[2] === doc.contents.range[0] && - !doc.contents.anchor && - !doc.contents.tag) - continue; - events.push(docStart); - addEvents(events, doc, error?.pos[0] ?? -1, root); - let docEnd = '-DOC'; - if (doc.directives.docEnd) - docEnd += ' ...'; - events.push(docEnd); - } - } - catch (e) { - return { events, error: error ?? e }; - } - events.push('-STR'); - return { events, error }; -} -function addEvents(events, doc, errPos, node) { - if (!node) { - events.push('=VAL :'); - return; - } - if (errPos !== -1 && identity.isNode(node) && node.range[0] >= errPos) - throw new Error(); - let props = ''; - let anchor = identity.isScalar(node) || identity.isCollection(node) ? node.anchor : undefined; - if (anchor) { - if (/\d$/.test(anchor)) { - const alt = anchor.replace(/\d$/, ''); - if (anchorExists(doc, alt)) - anchor = alt; - } - props = ` &${anchor}`; - } - if (identity.isNode(node) && node.tag) - props += ` <${node.tag}>`; - if (identity.isMap(node)) { - const ev = node.flow ? 
'+MAP {}' : '+MAP'; - events.push(`${ev}${props}`); - node.items.forEach(({ key, value }) => { - addEvents(events, doc, errPos, key); - addEvents(events, doc, errPos, value); - }); - events.push('-MAP'); - } - else if (identity.isSeq(node)) { - const ev = node.flow ? '+SEQ []' : '+SEQ'; - events.push(`${ev}${props}`); - node.items.forEach(item => { - addEvents(events, doc, errPos, item); - }); - events.push('-SEQ'); - } - else if (identity.isPair(node)) { - events.push(`+MAP${props}`); - addEvents(events, doc, errPos, node.key); - addEvents(events, doc, errPos, node.value); - events.push('-MAP'); - } - else if (identity.isAlias(node)) { - let alias = node.source; - if (alias && /\d$/.test(alias)) { - const alt = alias.replace(/\d$/, ''); - if (anchorExists(doc, alt)) - alias = alt; - } - events.push(`=ALI${props} *${alias}`); - } - else { - const scalar = scalarChar[String(node.type)]; - if (!scalar) - throw new Error(`Unexpected node type ${node.type}`); - const value = node.source - .replace(/\\/g, '\\\\') - .replace(/\0/g, '\\0') - .replace(/\x07/g, '\\a') - .replace(/\x08/g, '\\b') - .replace(/\t/g, '\\t') - .replace(/\n/g, '\\n') - .replace(/\v/g, '\\v') - .replace(/\f/g, '\\f') - .replace(/\r/g, '\\r') - .replace(/\x1b/g, '\\e'); - events.push(`=VAL${props} ${scalar}${value}`); - } -} - -exports.testEvents = testEvents; diff --git a/bin/node_modules/yaml/dist/util.d.ts b/bin/node_modules/yaml/dist/util.d.ts deleted file mode 100644 index 3d1b198..0000000 --- a/bin/node_modules/yaml/dist/util.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -export { createNode, CreateNodeContext } from './doc/createNode.js'; -export { debug, LogLevelId, warn } from './log.js'; -export { createPair } from './nodes/Pair.js'; -export { findPair } from './nodes/YAMLMap.js'; -export { toJS, ToJSContext } from './nodes/toJS.js'; -export { map as mapTag } from './schema/common/map.js'; -export { seq as seqTag } from './schema/common/seq.js'; -export { string as stringTag } from './schema/common/string.js'; -export { foldFlowLines, FoldOptions } from './stringify/foldFlowLines'; -export { StringifyContext } from './stringify/stringify.js'; -export { stringifyNumber } from './stringify/stringifyNumber.js'; -export { stringifyString } from './stringify/stringifyString.js'; diff --git a/bin/node_modules/yaml/dist/util.js b/bin/node_modules/yaml/dist/util.js deleted file mode 100644 index 2e0e5cd..0000000 --- a/bin/node_modules/yaml/dist/util.js +++ /dev/null @@ -1,28 +0,0 @@ -'use strict'; - -var createNode = require('./doc/createNode.js'); -var log = require('./log.js'); -var Pair = require('./nodes/Pair.js'); -var YAMLMap = require('./nodes/YAMLMap.js'); -var toJS = require('./nodes/toJS.js'); -var map = require('./schema/common/map.js'); -var seq = require('./schema/common/seq.js'); -var string = require('./schema/common/string.js'); -var foldFlowLines = require('./stringify/foldFlowLines.js'); -var stringifyNumber = require('./stringify/stringifyNumber.js'); -var stringifyString = require('./stringify/stringifyString.js'); - - - -exports.createNode = createNode.createNode; -exports.debug = log.debug; -exports.warn = log.warn; -exports.createPair = Pair.createPair; -exports.findPair = YAMLMap.findPair; -exports.toJS = toJS.toJS; -exports.mapTag = map.map; -exports.seqTag = seq.seq; -exports.stringTag = string.string; -exports.foldFlowLines = foldFlowLines.foldFlowLines; -exports.stringifyNumber = stringifyNumber.stringifyNumber; -exports.stringifyString = stringifyString.stringifyString; diff --git 
a/bin/node_modules/yaml/dist/visit.d.ts b/bin/node_modules/yaml/dist/visit.d.ts deleted file mode 100644 index 7a27bfc..0000000 --- a/bin/node_modules/yaml/dist/visit.d.ts +++ /dev/null @@ -1,102 +0,0 @@ -import type { Document } from './doc/Document.js'; -import type { Alias } from './nodes/Alias.js'; -import { Node } from './nodes/Node.js'; -import type { Pair } from './nodes/Pair.js'; -import type { Scalar } from './nodes/Scalar.js'; -import type { YAMLMap } from './nodes/YAMLMap.js'; -import type { YAMLSeq } from './nodes/YAMLSeq.js'; -export type visitorFn = (key: number | 'key' | 'value' | null, node: T, path: readonly (Document | Node | Pair)[]) => void | symbol | number | Node | Pair; -export type visitor = visitorFn | { - Alias?: visitorFn; - Collection?: visitorFn; - Map?: visitorFn; - Node?: visitorFn; - Pair?: visitorFn; - Scalar?: visitorFn; - Seq?: visitorFn; - Value?: visitorFn; -}; -export type asyncVisitorFn = (key: number | 'key' | 'value' | null, node: T, path: readonly (Document | Node | Pair)[]) => void | symbol | number | Node | Pair | Promise; -export type asyncVisitor = asyncVisitorFn | { - Alias?: asyncVisitorFn; - Collection?: asyncVisitorFn; - Map?: asyncVisitorFn; - Node?: asyncVisitorFn; - Pair?: asyncVisitorFn; - Scalar?: asyncVisitorFn; - Seq?: asyncVisitorFn; - Value?: asyncVisitorFn; -}; -/** - * Apply a visitor to an AST node or document. - * - * Walks through the tree (depth-first) starting from `node`, calling a - * `visitor` function with three arguments: - * - `key`: For sequence values and map `Pair`, the node's index in the - * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. - * `null` for the root node. - * - `node`: The current node. - * - `path`: The ancestry of the current node. - * - * The return value of the visitor may be used to control the traversal: - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this node, continue with next - * sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current node, then continue with the next one - * - `Node`: Replace the current node, then continue by visiting it - * - `number`: While iterating the items of a sequence or map, set the index - * of the next step. This is useful especially if the index of the current - * node has changed. - * - * If `visitor` is a single function, it will be called with all values - * encountered in the tree, including e.g. `null` values. Alternatively, - * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, - * `Alias` and `Scalar` node. To define the same visitor function for more than - * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) - * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most - * specific defined one will be used for each node. - */ -export declare function visit(node: Node | Document | null, visitor: visitor): void; -export declare namespace visit { - var BREAK: symbol; - var SKIP: symbol; - var REMOVE: symbol; -} -/** - * Apply an async visitor to an AST node or document. - * - * Walks through the tree (depth-first) starting from `node`, calling a - * `visitor` function with three arguments: - * - `key`: For sequence values and map `Pair`, the node's index in the - * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. - * `null` for the root node. - * - `node`: The current node. - * - `path`: The ancestry of the current node. 
- * - * The return value of the visitor may be used to control the traversal: - * - `Promise`: Must resolve to one of the following values - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this node, continue with next - * sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current node, then continue with the next one - * - `Node`: Replace the current node, then continue by visiting it - * - `number`: While iterating the items of a sequence or map, set the index - * of the next step. This is useful especially if the index of the current - * node has changed. - * - * If `visitor` is a single function, it will be called with all values - * encountered in the tree, including e.g. `null` values. Alternatively, - * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, - * `Alias` and `Scalar` node. To define the same visitor function for more than - * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) - * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most - * specific defined one will be used for each node. - */ -export declare function visitAsync(node: Node | Document | null, visitor: asyncVisitor): Promise; -export declare namespace visitAsync { - var BREAK: symbol; - var SKIP: symbol; - var REMOVE: symbol; -} diff --git a/bin/node_modules/yaml/dist/visit.js b/bin/node_modules/yaml/dist/visit.js deleted file mode 100644 index f126e54..0000000 --- a/bin/node_modules/yaml/dist/visit.js +++ /dev/null @@ -1,236 +0,0 @@ -'use strict'; - -var identity = require('./nodes/identity.js'); - -const BREAK = Symbol('break visit'); -const SKIP = Symbol('skip children'); -const REMOVE = Symbol('remove node'); -/** - * Apply a visitor to an AST node or document. - * - * Walks through the tree (depth-first) starting from `node`, calling a - * `visitor` function with three arguments: - * - `key`: For sequence values and map `Pair`, the node's index in the - * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. - * `null` for the root node. - * - `node`: The current node. - * - `path`: The ancestry of the current node. - * - * The return value of the visitor may be used to control the traversal: - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this node, continue with next - * sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current node, then continue with the next one - * - `Node`: Replace the current node, then continue by visiting it - * - `number`: While iterating the items of a sequence or map, set the index - * of the next step. This is useful especially if the index of the current - * node has changed. - * - * If `visitor` is a single function, it will be called with all values - * encountered in the tree, including e.g. `null` values. Alternatively, - * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, - * `Alias` and `Scalar` node. To define the same visitor function for more than - * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) - * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most - * specific defined one will be used for each node. 
- */ -function visit(node, visitor) { - const visitor_ = initVisitor(visitor); - if (identity.isDocument(node)) { - const cd = visit_(null, node.contents, visitor_, Object.freeze([node])); - if (cd === REMOVE) - node.contents = null; - } - else - visit_(null, node, visitor_, Object.freeze([])); -} -// Without the `as symbol` casts, TS declares these in the `visit` -// namespace using `var`, but then complains about that because -// `unique symbol` must be `const`. -/** Terminate visit traversal completely */ -visit.BREAK = BREAK; -/** Do not visit the children of the current node */ -visit.SKIP = SKIP; -/** Remove the current node */ -visit.REMOVE = REMOVE; -function visit_(key, node, visitor, path) { - const ctrl = callVisitor(key, node, visitor, path); - if (identity.isNode(ctrl) || identity.isPair(ctrl)) { - replaceNode(key, path, ctrl); - return visit_(key, ctrl, visitor, path); - } - if (typeof ctrl !== 'symbol') { - if (identity.isCollection(node)) { - path = Object.freeze(path.concat(node)); - for (let i = 0; i < node.items.length; ++i) { - const ci = visit_(i, node.items[i], visitor, path); - if (typeof ci === 'number') - i = ci - 1; - else if (ci === BREAK) - return BREAK; - else if (ci === REMOVE) { - node.items.splice(i, 1); - i -= 1; - } - } - } - else if (identity.isPair(node)) { - path = Object.freeze(path.concat(node)); - const ck = visit_('key', node.key, visitor, path); - if (ck === BREAK) - return BREAK; - else if (ck === REMOVE) - node.key = null; - const cv = visit_('value', node.value, visitor, path); - if (cv === BREAK) - return BREAK; - else if (cv === REMOVE) - node.value = null; - } - } - return ctrl; -} -/** - * Apply an async visitor to an AST node or document. - * - * Walks through the tree (depth-first) starting from `node`, calling a - * `visitor` function with three arguments: - * - `key`: For sequence values and map `Pair`, the node's index in the - * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. - * `null` for the root node. - * - `node`: The current node. - * - `path`: The ancestry of the current node. - * - * The return value of the visitor may be used to control the traversal: - * - `Promise`: Must resolve to one of the following values - * - `undefined` (default): Do nothing and continue - * - `visit.SKIP`: Do not visit the children of this node, continue with next - * sibling - * - `visit.BREAK`: Terminate traversal completely - * - `visit.REMOVE`: Remove the current node, then continue with the next one - * - `Node`: Replace the current node, then continue by visiting it - * - `number`: While iterating the items of a sequence or map, set the index - * of the next step. This is useful especially if the index of the current - * node has changed. - * - * If `visitor` is a single function, it will be called with all values - * encountered in the tree, including e.g. `null` values. Alternatively, - * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, - * `Alias` and `Scalar` node. To define the same visitor function for more than - * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) - * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most - * specific defined one will be used for each node. 
- */ -async function visitAsync(node, visitor) { - const visitor_ = initVisitor(visitor); - if (identity.isDocument(node)) { - const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node])); - if (cd === REMOVE) - node.contents = null; - } - else - await visitAsync_(null, node, visitor_, Object.freeze([])); -} -// Without the `as symbol` casts, TS declares these in the `visit` -// namespace using `var`, but then complains about that because -// `unique symbol` must be `const`. -/** Terminate visit traversal completely */ -visitAsync.BREAK = BREAK; -/** Do not visit the children of the current node */ -visitAsync.SKIP = SKIP; -/** Remove the current node */ -visitAsync.REMOVE = REMOVE; -async function visitAsync_(key, node, visitor, path) { - const ctrl = await callVisitor(key, node, visitor, path); - if (identity.isNode(ctrl) || identity.isPair(ctrl)) { - replaceNode(key, path, ctrl); - return visitAsync_(key, ctrl, visitor, path); - } - if (typeof ctrl !== 'symbol') { - if (identity.isCollection(node)) { - path = Object.freeze(path.concat(node)); - for (let i = 0; i < node.items.length; ++i) { - const ci = await visitAsync_(i, node.items[i], visitor, path); - if (typeof ci === 'number') - i = ci - 1; - else if (ci === BREAK) - return BREAK; - else if (ci === REMOVE) { - node.items.splice(i, 1); - i -= 1; - } - } - } - else if (identity.isPair(node)) { - path = Object.freeze(path.concat(node)); - const ck = await visitAsync_('key', node.key, visitor, path); - if (ck === BREAK) - return BREAK; - else if (ck === REMOVE) - node.key = null; - const cv = await visitAsync_('value', node.value, visitor, path); - if (cv === BREAK) - return BREAK; - else if (cv === REMOVE) - node.value = null; - } - } - return ctrl; -} -function initVisitor(visitor) { - if (typeof visitor === 'object' && - (visitor.Collection || visitor.Node || visitor.Value)) { - return Object.assign({ - Alias: visitor.Node, - Map: visitor.Node, - Scalar: visitor.Node, - Seq: visitor.Node - }, visitor.Value && { - Map: visitor.Value, - Scalar: visitor.Value, - Seq: visitor.Value - }, visitor.Collection && { - Map: visitor.Collection, - Seq: visitor.Collection - }, visitor); - } - return visitor; -} -function callVisitor(key, node, visitor, path) { - if (typeof visitor === 'function') - return visitor(key, node, path); - if (identity.isMap(node)) - return visitor.Map?.(key, node, path); - if (identity.isSeq(node)) - return visitor.Seq?.(key, node, path); - if (identity.isPair(node)) - return visitor.Pair?.(key, node, path); - if (identity.isScalar(node)) - return visitor.Scalar?.(key, node, path); - if (identity.isAlias(node)) - return visitor.Alias?.(key, node, path); - return undefined; -} -function replaceNode(key, path, node) { - const parent = path[path.length - 1]; - if (identity.isCollection(parent)) { - parent.items[key] = node; - } - else if (identity.isPair(parent)) { - if (key === 'key') - parent.key = node; - else - parent.value = node; - } - else if (identity.isDocument(parent)) { - parent.contents = node; - } - else { - const pt = identity.isAlias(parent) ? 
'alias' : 'scalar'; - throw new Error(`Cannot replace node with ${pt} parent`); - } -} - -exports.visit = visit; -exports.visitAsync = visitAsync; diff --git a/bin/node_modules/yaml/package.json b/bin/node_modules/yaml/package.json deleted file mode 100644 index a1103f9..0000000 --- a/bin/node_modules/yaml/package.json +++ /dev/null @@ -1,98 +0,0 @@ -{ - "name": "yaml", - "version": "2.4.3", - "license": "ISC", - "author": "Eemeli Aro ", - "repository": "github:eemeli/yaml", - "description": "JavaScript parser and stringifier for YAML", - "keywords": [ - "YAML", - "parser", - "stringifier" - ], - "homepage": "https://eemeli.org/yaml/", - "files": [ - "browser/", - "dist/", - "util.js" - ], - "type": "commonjs", - "main": "./dist/index.js", - "bin": "./bin.mjs", - "browser": { - "./dist/index.js": "./browser/index.js", - "./dist/util.js": "./browser/dist/util.js", - "./util.js": "./browser/dist/util.js" - }, - "exports": { - ".": { - "types": "./dist/index.d.ts", - "node": "./dist/index.js", - "default": "./browser/index.js" - }, - "./package.json": "./package.json", - "./util": { - "types": "./dist/util.d.ts", - "node": "./dist/util.js", - "default": "./browser/dist/util.js" - } - }, - "scripts": { - "build": "npm run build:node && npm run build:browser", - "build:browser": "rollup -c config/rollup.browser-config.mjs", - "build:node": "rollup -c config/rollup.node-config.mjs", - "clean": "git clean -fdxe node_modules", - "lint": "eslint src/", - "prettier": "prettier --write .", - "prestart": "npm run build:node", - "start": "node -i -e 'YAML=require(\"./dist/index.js\")'", - "test": "jest --config config/jest.config.js", - "test:all": "npm test && npm run test:types && npm run test:dist && npm run test:dist:types", - "test:browsers": "cd playground && npm test", - "test:dist": "npm run build:node && jest --config config/jest.config.js", - "test:dist:types": "tsc --allowJs --moduleResolution node --noEmit --target es5 dist/index.js", - "test:types": "tsc --noEmit && tsc --noEmit -p tests/tsconfig.json", - "docs:install": "cd docs-slate && bundle install", - "predocs:deploy": "node docs/prepare-docs.mjs", - "docs:deploy": "cd docs-slate && ./deploy.sh", - "predocs": "node docs/prepare-docs.mjs", - "docs": "cd docs-slate && bundle exec middleman server", - "preversion": "npm test && npm run build", - "prepublishOnly": "npm run clean && npm test && npm run build" - }, - "browserslist": "defaults, not ie 11", - "prettier": { - "arrowParens": "avoid", - "semi": false, - "singleQuote": true, - "trailingComma": "none" - }, - "devDependencies": { - "@babel/core": "^7.12.10", - "@babel/plugin-transform-class-properties": "^7.23.3", - "@babel/plugin-transform-nullish-coalescing-operator": "^7.23.4", - "@babel/plugin-transform-typescript": "^7.12.17", - "@babel/preset-env": "^7.12.11", - "@rollup/plugin-babel": "^6.0.3", - "@rollup/plugin-replace": "^5.0.2", - "@rollup/plugin-typescript": "^11.0.0", - "@types/jest": "^29.2.4", - "@types/node": "^20.11.20", - "@typescript-eslint/eslint-plugin": "^7.0.2", - "@typescript-eslint/parser": "^7.0.2", - "babel-jest": "^29.0.1", - "cross-env": "^7.0.3", - "eslint": "^8.2.0", - "eslint-config-prettier": "^9.0.0", - "fast-check": "^2.12.0", - "jest": "^29.0.1", - "jest-ts-webcompat-resolver": "^1.0.0", - "prettier": "^3.0.2", - "rollup": "^4.12.0", - "tslib": "^2.1.0", - "typescript": "^5.0.3" - }, - "engines": { - "node": ">= 14" - } -} diff --git a/bin/node_modules/yaml/util.js b/bin/node_modules/yaml/util.js deleted file mode 100644 index 070103f..0000000 
--- a/bin/node_modules/yaml/util.js +++ /dev/null @@ -1,2 +0,0 @@ -// Re-exporter for Node.js < 12.16.0 -module.exports = require('./dist/util.js')