Mirror of https://github.com/dawidd6/action-ansible-playbook.git (synced 2024-11-22 07:26:25 +00:00)
package: add yaml
This commit is contained in:
parent d270136941
commit 7b2f0761a9
60 changed files with 16013 additions and 2 deletions
33  node_modules/.package-lock.json  generated  vendored  Normal file
@@ -0,0 +1,33 @@
{
  "name": "action-ansible-playbook",
  "lockfileVersion": 2,
  "requires": true,
  "packages": {
    "node_modules/@actions/core": {
      "version": "1.2.6",
      "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.2.6.tgz",
      "integrity": "sha512-ZQYitnqiyBc3D+k7LsgSBmMDVkOVidaagDG7j3fOym77jNunWRuYx7VSHa9GNfFZh+zh61xsCjRj4JxMZlDqTA=="
    },
    "node_modules/@actions/exec": {
      "version": "1.0.4",
      "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.0.4.tgz",
      "integrity": "sha512-4DPChWow9yc9W3WqEbUj8Nr86xkpyE29ZzWjXucHItclLbEW6jr80Zx4nqv18QL6KK65+cifiQZXvnqgTV6oHw==",
      "dependencies": {
        "@actions/io": "^1.0.1"
      }
    },
    "node_modules/@actions/io": {
      "version": "1.0.2",
      "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.0.2.tgz",
      "integrity": "sha512-J8KuFqVPr3p6U8W93DOXlXW6zFvrQAJANdS+vw0YhusLIq+bszW8zmK2Fh1C2kDPX8FMvwIl1OUcFgvJoXLbAg=="
    },
    "node_modules/yaml": {
      "version": "1.10.0",
      "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.0.tgz",
      "integrity": "sha512-yr2icI4glYaNG+KWONODapy2/jDdMSDnrONSjblABjD9B4Z5LgiircSt8m8sRZFNi08kG9Sm0uSHtEmP3zaEGg==",
      "engines": {
        "node": ">= 6"
      }
    }
  }
}
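The lockfile now pulls in `yaml` 1.10.0 alongside the existing `@actions/*` packages. One plausible way the action's own code could use the new dependency is parsing a YAML-valued input before acting on it; this is only an illustrative sketch, and the `requirements` input name is an assumption, not taken from this commit:

```js
const core = require('@actions/core')
const YAML = require('yaml')

// `requirements` is an assumed input name, used here only for illustration.
const raw = core.getInput('requirements')
if (raw) {
  const requirements = YAML.parse(raw)
  const count = Array.isArray(requirements)
    ? requirements.length
    : Object.keys(requirements || {}).length
  core.info(`Parsed ${count} requirement entries`)
}
```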
13  node_modules/yaml/LICENSE  generated  vendored  Normal file
@@ -0,0 +1,13 @@
Copyright 2018 Eemeli Aro <eemeli@gmail.com>

Permission to use, copy, modify, and/or distribute this software for any purpose
with or without fee is hereby granted, provided that the above copyright notice
and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
THIS SOFTWARE.
127  node_modules/yaml/README.md  generated  vendored  Normal file
@@ -0,0 +1,127 @@
# YAML <a href="https://www.npmjs.com/package/yaml"><img align="right" src="https://badge.fury.io/js/yaml.svg" title="npm package" /></a><a href="https://travis-ci.org/eemeli/yaml"><img align="right" src="https://travis-ci.org/eemeli/yaml.svg?branch=master" title="Build status" /></a>

`yaml` is a JavaScript parser and stringifier for [YAML](http://yaml.org/), a human friendly data serialization standard. It supports both parsing and stringifying data using all versions of YAML, along with all common data schemas. As a particularly distinguishing feature, `yaml` fully supports reading and writing comments and blank lines in YAML documents.

The library is released under the ISC open source license, and the code is [available on GitHub](https://github.com/eemeli/yaml/). It has no external dependencies and runs on Node.js 6 and later, and in browsers from IE 11 upwards.

For the purposes of versioning, any changes that break any of the endpoints or APIs documented here will be considered semver-major breaking changes. Undocumented library internals may change between minor versions, and previous APIs may be deprecated (but not removed).

For more information, see the project's documentation site: [**eemeli.org/yaml**](https://eemeli.org/yaml/)

To install:

```sh
npm install yaml
```

Note: `yaml` 0.x and 1.x are rather different implementations. For the earlier `yaml`, see [tj/js-yaml](https://github.com/tj/js-yaml).

## API Overview

The API provided by `yaml` has three layers, depending on how deep you need to go: [Parse & Stringify](https://eemeli.org/yaml/#parse-amp-stringify), [Documents](https://eemeli.org/yaml/#documents), and the [CST Parser](https://eemeli.org/yaml/#cst-parser). The first has the simplest API and "just works", the second gets you all the bells and whistles supported by the library along with a decent [AST](https://eemeli.org/yaml/#content-nodes), and the third is the closest to YAML source, making it fast, raw, and crude.

```js
import YAML from 'yaml'
// or
const YAML = require('yaml')
```

### Parse & Stringify

- [`YAML.parse(str, options): value`](https://eemeli.org/yaml/#yaml-parse)
- [`YAML.stringify(value, options): string`](https://eemeli.org/yaml/#yaml-stringify)

### YAML Documents

- [`YAML.createNode(value, wrapScalars, tag): Node`](https://eemeli.org/yaml/#creating-nodes)
- [`YAML.defaultOptions`](https://eemeli.org/yaml/#options)
- [`YAML.Document`](https://eemeli.org/yaml/#yaml-documents)
  - [`constructor(options)`](https://eemeli.org/yaml/#creating-documents)
  - [`defaults`](https://eemeli.org/yaml/#options)
  - [`#anchors`](https://eemeli.org/yaml/#working-with-anchors)
  - [`#contents`](https://eemeli.org/yaml/#content-nodes)
  - [`#errors`](https://eemeli.org/yaml/#errors)
- [`YAML.parseAllDocuments(str, options): YAML.Document[]`](https://eemeli.org/yaml/#parsing-documents)
- [`YAML.parseDocument(str, options): YAML.Document`](https://eemeli.org/yaml/#parsing-documents)

```js
import { Pair, YAMLMap, YAMLSeq } from 'yaml/types'
```

- [`new Pair(key, value)`](https://eemeli.org/yaml/#creating-nodes)
- [`new YAMLMap()`](https://eemeli.org/yaml/#creating-nodes)
- [`new YAMLSeq()`](https://eemeli.org/yaml/#creating-nodes)

### CST Parser

```js
import parseCST from 'yaml/parse-cst'
```

- [`parseCST(str): CSTDocument[]`](https://eemeli.org/yaml/#parsecst)
- [`YAML.parseCST(str): CSTDocument[]`](https://eemeli.org/yaml/#parsecst)

## YAML.parse

```yaml
# file.yml
YAML:
  - A human-readable data serialization language
  - https://en.wikipedia.org/wiki/YAML
yaml:
  - A complete JavaScript implementation
  - https://www.npmjs.com/package/yaml
```

```js
import fs from 'fs'
import YAML from 'yaml'

YAML.parse('3.14159')
// 3.14159

YAML.parse('[ true, false, maybe, null ]\n')
// [ true, false, 'maybe', null ]

const file = fs.readFileSync('./file.yml', 'utf8')
YAML.parse(file)
// { YAML:
//   [ 'A human-readable data serialization language',
//     'https://en.wikipedia.org/wiki/YAML' ],
//   yaml:
//   [ 'A complete JavaScript implementation',
//     'https://www.npmjs.com/package/yaml' ] }
```

## YAML.stringify

```js
import YAML from 'yaml'

YAML.stringify(3.14159)
// '3.14159\n'

YAML.stringify([true, false, 'maybe', null])
// `- true
// - false
// - maybe
// - null
// `

YAML.stringify({ number: 3, plain: 'string', block: 'two\nlines\n' })
// `number: 3
// plain: string
// block: >
//   two
//
//   lines
// `
```

---

Browser testing provided by:

<a href="https://www.browserstack.com/open-source">
  <img width=200 src="https://eemeli.org/yaml/images/browserstack.svg" />
</a>
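The Documents layer listed in that README is what makes the comment round-tripping possible. A minimal sketch, assuming the public `YAML.parseDocument` API as documented at eemeli.org/yaml (the sample source string is only an assumption):

```js
const YAML = require('yaml')

const src = '# inventory\nhosts:\n  - web01 # primary\n'
const doc = YAML.parseDocument(src)

doc.errors   // → [] when the source parses cleanly
String(doc)  // re-stringifies the document, keeping both comments above
```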
1277  node_modules/yaml/browser/dist/PlainValue-ff5147c6.js  generated  vendored  Normal file
File diff suppressed because it is too large
679  node_modules/yaml/browser/dist/Schema-2bf2c74e.js  generated  vendored  Normal file
@@ -0,0 +1,679 @@
import { _ as _createForOfIteratorHelper, h as _slicedToArray, a as _typeof, b as _createClass, e as _defineProperty, c as _classCallCheck, d as defaultTagPrefix, n as defaultTags } from './PlainValue-ff5147c6.js';
|
||||
import { d as YAMLMap, g as resolveMap, Y as YAMLSeq, h as resolveSeq, j as resolveString, c as stringifyString, s as strOptions, S as Scalar, n as nullOptions, a as boolOptions, i as intOptions, k as stringifyNumber, N as Node, A as Alias, P as Pair } from './resolveSeq-04825f30.js';
|
||||
import { b as binary, o as omap, p as pairs, s as set, i as intTime, f as floatTime, t as timestamp, a as warnOptionDeprecation } from './warnings-0e4b70d3.js';
|
||||
|
||||
function createMap(schema, obj, ctx) {
|
||||
var map = new YAMLMap(schema);
|
||||
|
||||
if (obj instanceof Map) {
|
||||
var _iterator = _createForOfIteratorHelper(obj),
|
||||
_step;
|
||||
|
||||
try {
|
||||
for (_iterator.s(); !(_step = _iterator.n()).done;) {
|
||||
var _step$value = _slicedToArray(_step.value, 2),
|
||||
key = _step$value[0],
|
||||
value = _step$value[1];
|
||||
|
||||
map.items.push(schema.createPair(key, value, ctx));
|
||||
}
|
||||
} catch (err) {
|
||||
_iterator.e(err);
|
||||
} finally {
|
||||
_iterator.f();
|
||||
}
|
||||
} else if (obj && _typeof(obj) === 'object') {
|
||||
for (var _i = 0, _Object$keys = Object.keys(obj); _i < _Object$keys.length; _i++) {
|
||||
var _key = _Object$keys[_i];
|
||||
map.items.push(schema.createPair(_key, obj[_key], ctx));
|
||||
}
|
||||
}
|
||||
|
||||
if (typeof schema.sortMapEntries === 'function') {
|
||||
map.items.sort(schema.sortMapEntries);
|
||||
}
|
||||
|
||||
return map;
|
||||
}
|
||||
|
||||
var map = {
|
||||
createNode: createMap,
|
||||
default: true,
|
||||
nodeClass: YAMLMap,
|
||||
tag: 'tag:yaml.org,2002:map',
|
||||
resolve: resolveMap
|
||||
};
|
||||
|
||||
function createSeq(schema, obj, ctx) {
|
||||
var seq = new YAMLSeq(schema);
|
||||
|
||||
if (obj && obj[Symbol.iterator]) {
|
||||
var _iterator = _createForOfIteratorHelper(obj),
|
||||
_step;
|
||||
|
||||
try {
|
||||
for (_iterator.s(); !(_step = _iterator.n()).done;) {
|
||||
var it = _step.value;
|
||||
var v = schema.createNode(it, ctx.wrapScalars, null, ctx);
|
||||
seq.items.push(v);
|
||||
}
|
||||
} catch (err) {
|
||||
_iterator.e(err);
|
||||
} finally {
|
||||
_iterator.f();
|
||||
}
|
||||
}
|
||||
|
||||
return seq;
|
||||
}
|
||||
|
||||
var seq = {
|
||||
createNode: createSeq,
|
||||
default: true,
|
||||
nodeClass: YAMLSeq,
|
||||
tag: 'tag:yaml.org,2002:seq',
|
||||
resolve: resolveSeq
|
||||
};
|
||||
|
||||
var string = {
|
||||
identify: function identify(value) {
|
||||
return typeof value === 'string';
|
||||
},
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:str',
|
||||
resolve: resolveString,
|
||||
stringify: function stringify(item, ctx, onComment, onChompKeep) {
|
||||
ctx = Object.assign({
|
||||
actualString: true
|
||||
}, ctx);
|
||||
return stringifyString(item, ctx, onComment, onChompKeep);
|
||||
},
|
||||
options: strOptions
|
||||
};
|
||||
|
||||
var failsafe = [map, seq, string];
|
||||
|
||||
/* global BigInt */
|
||||
|
||||
var intIdentify = function intIdentify(value) {
|
||||
return typeof value === 'bigint' || Number.isInteger(value);
|
||||
};
|
||||
|
||||
var intResolve = function intResolve(src, part, radix) {
|
||||
return intOptions.asBigInt ? BigInt(src) : parseInt(part, radix);
|
||||
};
|
||||
|
||||
function intStringify(node, radix, prefix) {
|
||||
var value = node.value;
|
||||
if (intIdentify(value) && value >= 0) return prefix + value.toString(radix);
|
||||
return stringifyNumber(node);
|
||||
}
|
||||
|
||||
var nullObj = {
|
||||
identify: function identify(value) {
|
||||
return value == null;
|
||||
},
|
||||
createNode: function createNode(schema, value, ctx) {
|
||||
return ctx.wrapScalars ? new Scalar(null) : null;
|
||||
},
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:null',
|
||||
test: /^(?:~|[Nn]ull|NULL)?$/,
|
||||
resolve: function resolve() {
|
||||
return null;
|
||||
},
|
||||
options: nullOptions,
|
||||
stringify: function stringify() {
|
||||
return nullOptions.nullStr;
|
||||
}
|
||||
};
|
||||
var boolObj = {
|
||||
identify: function identify(value) {
|
||||
return typeof value === 'boolean';
|
||||
},
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:bool',
|
||||
test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/,
|
||||
resolve: function resolve(str) {
|
||||
return str[0] === 't' || str[0] === 'T';
|
||||
},
|
||||
options: boolOptions,
|
||||
stringify: function stringify(_ref) {
|
||||
var value = _ref.value;
|
||||
return value ? boolOptions.trueStr : boolOptions.falseStr;
|
||||
}
|
||||
};
|
||||
var octObj = {
|
||||
identify: function identify(value) {
|
||||
return intIdentify(value) && value >= 0;
|
||||
},
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
format: 'OCT',
|
||||
test: /^0o([0-7]+)$/,
|
||||
resolve: function resolve(str, oct) {
|
||||
return intResolve(str, oct, 8);
|
||||
},
|
||||
options: intOptions,
|
||||
stringify: function stringify(node) {
|
||||
return intStringify(node, 8, '0o');
|
||||
}
|
||||
};
|
||||
var intObj = {
|
||||
identify: intIdentify,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
test: /^[-+]?[0-9]+$/,
|
||||
resolve: function resolve(str) {
|
||||
return intResolve(str, str, 10);
|
||||
},
|
||||
options: intOptions,
|
||||
stringify: stringifyNumber
|
||||
};
|
||||
var hexObj = {
|
||||
identify: function identify(value) {
|
||||
return intIdentify(value) && value >= 0;
|
||||
},
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
format: 'HEX',
|
||||
test: /^0x([0-9a-fA-F]+)$/,
|
||||
resolve: function resolve(str, hex) {
|
||||
return intResolve(str, hex, 16);
|
||||
},
|
||||
options: intOptions,
|
||||
stringify: function stringify(node) {
|
||||
return intStringify(node, 16, '0x');
|
||||
}
|
||||
};
|
||||
var nanObj = {
|
||||
identify: function identify(value) {
|
||||
return typeof value === 'number';
|
||||
},
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:float',
|
||||
test: /^(?:[-+]?\.inf|(\.nan))$/i,
|
||||
resolve: function resolve(str, nan) {
|
||||
return nan ? NaN : str[0] === '-' ? Number.NEGATIVE_INFINITY : Number.POSITIVE_INFINITY;
|
||||
},
|
||||
stringify: stringifyNumber
|
||||
};
|
||||
var expObj = {
|
||||
identify: function identify(value) {
|
||||
return typeof value === 'number';
|
||||
},
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:float',
|
||||
format: 'EXP',
|
||||
test: /^[-+]?(?:\.[0-9]+|[0-9]+(?:\.[0-9]*)?)[eE][-+]?[0-9]+$/,
|
||||
resolve: function resolve(str) {
|
||||
return parseFloat(str);
|
||||
},
|
||||
stringify: function stringify(_ref2) {
|
||||
var value = _ref2.value;
|
||||
return Number(value).toExponential();
|
||||
}
|
||||
};
|
||||
var floatObj = {
|
||||
identify: function identify(value) {
|
||||
return typeof value === 'number';
|
||||
},
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:float',
|
||||
test: /^[-+]?(?:\.([0-9]+)|[0-9]+\.([0-9]*))$/,
|
||||
resolve: function resolve(str, frac1, frac2) {
|
||||
var frac = frac1 || frac2;
|
||||
var node = new Scalar(parseFloat(str));
|
||||
if (frac && frac[frac.length - 1] === '0') node.minFractionDigits = frac.length;
|
||||
return node;
|
||||
},
|
||||
stringify: stringifyNumber
|
||||
};
|
||||
var core = failsafe.concat([nullObj, boolObj, octObj, intObj, hexObj, nanObj, expObj, floatObj]);
|
||||
|
||||
/* global BigInt */
|
||||
|
||||
var intIdentify$1 = function intIdentify(value) {
|
||||
return typeof value === 'bigint' || Number.isInteger(value);
|
||||
};
|
||||
|
||||
var stringifyJSON = function stringifyJSON(_ref) {
|
||||
var value = _ref.value;
|
||||
return JSON.stringify(value);
|
||||
};
|
||||
|
||||
var json = [map, seq, {
|
||||
identify: function identify(value) {
|
||||
return typeof value === 'string';
|
||||
},
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:str',
|
||||
resolve: resolveString,
|
||||
stringify: stringifyJSON
|
||||
}, {
|
||||
identify: function identify(value) {
|
||||
return value == null;
|
||||
},
|
||||
createNode: function createNode(schema, value, ctx) {
|
||||
return ctx.wrapScalars ? new Scalar(null) : null;
|
||||
},
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:null',
|
||||
test: /^null$/,
|
||||
resolve: function resolve() {
|
||||
return null;
|
||||
},
|
||||
stringify: stringifyJSON
|
||||
}, {
|
||||
identify: function identify(value) {
|
||||
return typeof value === 'boolean';
|
||||
},
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:bool',
|
||||
test: /^true|false$/,
|
||||
resolve: function resolve(str) {
|
||||
return str === 'true';
|
||||
},
|
||||
stringify: stringifyJSON
|
||||
}, {
|
||||
identify: intIdentify$1,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
test: /^-?(?:0|[1-9][0-9]*)$/,
|
||||
resolve: function resolve(str) {
|
||||
return intOptions.asBigInt ? BigInt(str) : parseInt(str, 10);
|
||||
},
|
||||
stringify: function stringify(_ref2) {
|
||||
var value = _ref2.value;
|
||||
return intIdentify$1(value) ? value.toString() : JSON.stringify(value);
|
||||
}
|
||||
}, {
|
||||
identify: function identify(value) {
|
||||
return typeof value === 'number';
|
||||
},
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:float',
|
||||
test: /^-?(?:0|[1-9][0-9]*)(?:\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/,
|
||||
resolve: function resolve(str) {
|
||||
return parseFloat(str);
|
||||
},
|
||||
stringify: stringifyJSON
|
||||
}];
|
||||
|
||||
json.scalarFallback = function (str) {
|
||||
throw new SyntaxError("Unresolved plain scalar ".concat(JSON.stringify(str)));
|
||||
};
|
||||
|
||||
/* global BigInt */
|
||||
|
||||
var boolStringify = function boolStringify(_ref) {
|
||||
var value = _ref.value;
|
||||
return value ? boolOptions.trueStr : boolOptions.falseStr;
|
||||
};
|
||||
|
||||
var intIdentify$2 = function intIdentify(value) {
|
||||
return typeof value === 'bigint' || Number.isInteger(value);
|
||||
};
|
||||
|
||||
function intResolve$1(sign, src, radix) {
|
||||
var str = src.replace(/_/g, '');
|
||||
|
||||
if (intOptions.asBigInt) {
|
||||
switch (radix) {
|
||||
case 2:
|
||||
str = "0b".concat(str);
|
||||
break;
|
||||
|
||||
case 8:
|
||||
str = "0o".concat(str);
|
||||
break;
|
||||
|
||||
case 16:
|
||||
str = "0x".concat(str);
|
||||
break;
|
||||
}
|
||||
|
||||
var _n = BigInt(str);
|
||||
|
||||
return sign === '-' ? BigInt(-1) * _n : _n;
|
||||
}
|
||||
|
||||
var n = parseInt(str, radix);
|
||||
return sign === '-' ? -1 * n : n;
|
||||
}
|
||||
|
||||
function intStringify$1(node, radix, prefix) {
|
||||
var value = node.value;
|
||||
|
||||
if (intIdentify$2(value)) {
|
||||
var str = value.toString(radix);
|
||||
return value < 0 ? '-' + prefix + str.substr(1) : prefix + str;
|
||||
}
|
||||
|
||||
return stringifyNumber(node);
|
||||
}
|
||||
|
||||
var yaml11 = failsafe.concat([{
|
||||
identify: function identify(value) {
|
||||
return value == null;
|
||||
},
|
||||
createNode: function createNode(schema, value, ctx) {
|
||||
return ctx.wrapScalars ? new Scalar(null) : null;
|
||||
},
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:null',
|
||||
test: /^(?:~|[Nn]ull|NULL)?$/,
|
||||
resolve: function resolve() {
|
||||
return null;
|
||||
},
|
||||
options: nullOptions,
|
||||
stringify: function stringify() {
|
||||
return nullOptions.nullStr;
|
||||
}
|
||||
}, {
|
||||
identify: function identify(value) {
|
||||
return typeof value === 'boolean';
|
||||
},
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:bool',
|
||||
test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/,
|
||||
resolve: function resolve() {
|
||||
return true;
|
||||
},
|
||||
options: boolOptions,
|
||||
stringify: boolStringify
|
||||
}, {
|
||||
identify: function identify(value) {
|
||||
return typeof value === 'boolean';
|
||||
},
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:bool',
|
||||
test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/i,
|
||||
resolve: function resolve() {
|
||||
return false;
|
||||
},
|
||||
options: boolOptions,
|
||||
stringify: boolStringify
|
||||
}, {
|
||||
identify: intIdentify$2,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
format: 'BIN',
|
||||
test: /^([-+]?)0b([0-1_]+)$/,
|
||||
resolve: function resolve(str, sign, bin) {
|
||||
return intResolve$1(sign, bin, 2);
|
||||
},
|
||||
stringify: function stringify(node) {
|
||||
return intStringify$1(node, 2, '0b');
|
||||
}
|
||||
}, {
|
||||
identify: intIdentify$2,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
format: 'OCT',
|
||||
test: /^([-+]?)0([0-7_]+)$/,
|
||||
resolve: function resolve(str, sign, oct) {
|
||||
return intResolve$1(sign, oct, 8);
|
||||
},
|
||||
stringify: function stringify(node) {
|
||||
return intStringify$1(node, 8, '0');
|
||||
}
|
||||
}, {
|
||||
identify: intIdentify$2,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
test: /^([-+]?)([0-9][0-9_]*)$/,
|
||||
resolve: function resolve(str, sign, abs) {
|
||||
return intResolve$1(sign, abs, 10);
|
||||
},
|
||||
stringify: stringifyNumber
|
||||
}, {
|
||||
identify: intIdentify$2,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
format: 'HEX',
|
||||
test: /^([-+]?)0x([0-9a-fA-F_]+)$/,
|
||||
resolve: function resolve(str, sign, hex) {
|
||||
return intResolve$1(sign, hex, 16);
|
||||
},
|
||||
stringify: function stringify(node) {
|
||||
return intStringify$1(node, 16, '0x');
|
||||
}
|
||||
}, {
|
||||
identify: function identify(value) {
|
||||
return typeof value === 'number';
|
||||
},
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:float',
|
||||
test: /^(?:[-+]?\.inf|(\.nan))$/i,
|
||||
resolve: function resolve(str, nan) {
|
||||
return nan ? NaN : str[0] === '-' ? Number.NEGATIVE_INFINITY : Number.POSITIVE_INFINITY;
|
||||
},
|
||||
stringify: stringifyNumber
|
||||
}, {
|
||||
identify: function identify(value) {
|
||||
return typeof value === 'number';
|
||||
},
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:float',
|
||||
format: 'EXP',
|
||||
test: /^[-+]?([0-9][0-9_]*)?(\.[0-9_]*)?[eE][-+]?[0-9]+$/,
|
||||
resolve: function resolve(str) {
|
||||
return parseFloat(str.replace(/_/g, ''));
|
||||
},
|
||||
stringify: function stringify(_ref2) {
|
||||
var value = _ref2.value;
|
||||
return Number(value).toExponential();
|
||||
}
|
||||
}, {
|
||||
identify: function identify(value) {
|
||||
return typeof value === 'number';
|
||||
},
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:float',
|
||||
test: /^[-+]?(?:[0-9][0-9_]*)?\.([0-9_]*)$/,
|
||||
resolve: function resolve(str, frac) {
|
||||
var node = new Scalar(parseFloat(str.replace(/_/g, '')));
|
||||
|
||||
if (frac) {
|
||||
var f = frac.replace(/_/g, '');
|
||||
if (f[f.length - 1] === '0') node.minFractionDigits = f.length;
|
||||
}
|
||||
|
||||
return node;
|
||||
},
|
||||
stringify: stringifyNumber
|
||||
}], binary, omap, pairs, set, intTime, floatTime, timestamp);
|
||||
|
||||
var schemas = {
|
||||
core: core,
|
||||
failsafe: failsafe,
|
||||
json: json,
|
||||
yaml11: yaml11
|
||||
};
|
||||
var tags = {
|
||||
binary: binary,
|
||||
bool: boolObj,
|
||||
float: floatObj,
|
||||
floatExp: expObj,
|
||||
floatNaN: nanObj,
|
||||
floatTime: floatTime,
|
||||
int: intObj,
|
||||
intHex: hexObj,
|
||||
intOct: octObj,
|
||||
intTime: intTime,
|
||||
map: map,
|
||||
null: nullObj,
|
||||
omap: omap,
|
||||
pairs: pairs,
|
||||
seq: seq,
|
||||
set: set,
|
||||
timestamp: timestamp
|
||||
};
|
||||
|
||||
function findTagObject(value, tagName, tags) {
|
||||
if (tagName) {
|
||||
var match = tags.filter(function (t) {
|
||||
return t.tag === tagName;
|
||||
});
|
||||
var tagObj = match.find(function (t) {
|
||||
return !t.format;
|
||||
}) || match[0];
|
||||
if (!tagObj) throw new Error("Tag ".concat(tagName, " not found"));
|
||||
return tagObj;
|
||||
} // TODO: deprecate/remove class check
|
||||
|
||||
|
||||
return tags.find(function (t) {
|
||||
return (t.identify && t.identify(value) || t.class && value instanceof t.class) && !t.format;
|
||||
});
|
||||
}
|
||||
|
||||
function createNode(value, tagName, ctx) {
|
||||
if (value instanceof Node) return value;
|
||||
var defaultPrefix = ctx.defaultPrefix,
|
||||
onTagObj = ctx.onTagObj,
|
||||
prevObjects = ctx.prevObjects,
|
||||
schema = ctx.schema,
|
||||
wrapScalars = ctx.wrapScalars;
|
||||
if (tagName && tagName.startsWith('!!')) tagName = defaultPrefix + tagName.slice(2);
|
||||
var tagObj = findTagObject(value, tagName, schema.tags);
|
||||
|
||||
if (!tagObj) {
|
||||
if (typeof value.toJSON === 'function') value = value.toJSON();
|
||||
if (_typeof(value) !== 'object') return wrapScalars ? new Scalar(value) : value;
|
||||
tagObj = value instanceof Map ? map : value[Symbol.iterator] ? seq : map;
|
||||
}
|
||||
|
||||
if (onTagObj) {
|
||||
onTagObj(tagObj);
|
||||
delete ctx.onTagObj;
|
||||
} // Detect duplicate references to the same object & use Alias nodes for all
|
||||
// after first. The `obj` wrapper allows for circular references to resolve.
|
||||
|
||||
|
||||
var obj = {};
|
||||
|
||||
if (value && _typeof(value) === 'object' && prevObjects) {
|
||||
var prev = prevObjects.get(value);
|
||||
|
||||
if (prev) {
|
||||
var alias = new Alias(prev); // leaves source dirty; must be cleaned by caller
|
||||
|
||||
ctx.aliasNodes.push(alias); // defined along with prevObjects
|
||||
|
||||
return alias;
|
||||
}
|
||||
|
||||
obj.value = value;
|
||||
prevObjects.set(value, obj);
|
||||
}
|
||||
|
||||
obj.node = tagObj.createNode ? tagObj.createNode(ctx.schema, value, ctx) : wrapScalars ? new Scalar(value) : value;
|
||||
if (tagName && obj.node instanceof Node) obj.node.tag = tagName;
|
||||
return obj.node;
|
||||
}
|
||||
|
||||
function getSchemaTags(schemas, knownTags, customTags, schemaId) {
|
||||
var tags = schemas[schemaId.replace(/\W/g, '')]; // 'yaml-1.1' -> 'yaml11'
|
||||
|
||||
if (!tags) {
|
||||
var keys = Object.keys(schemas).map(function (key) {
|
||||
return JSON.stringify(key);
|
||||
}).join(', ');
|
||||
throw new Error("Unknown schema \"".concat(schemaId, "\"; use one of ").concat(keys));
|
||||
}
|
||||
|
||||
if (Array.isArray(customTags)) {
|
||||
var _iterator = _createForOfIteratorHelper(customTags),
|
||||
_step;
|
||||
|
||||
try {
|
||||
for (_iterator.s(); !(_step = _iterator.n()).done;) {
|
||||
var tag = _step.value;
|
||||
tags = tags.concat(tag);
|
||||
}
|
||||
} catch (err) {
|
||||
_iterator.e(err);
|
||||
} finally {
|
||||
_iterator.f();
|
||||
}
|
||||
} else if (typeof customTags === 'function') {
|
||||
tags = customTags(tags.slice());
|
||||
}
|
||||
|
||||
for (var i = 0; i < tags.length; ++i) {
|
||||
var _tag = tags[i];
|
||||
|
||||
if (typeof _tag === 'string') {
|
||||
var tagObj = knownTags[_tag];
|
||||
|
||||
if (!tagObj) {
|
||||
var _keys = Object.keys(knownTags).map(function (key) {
|
||||
return JSON.stringify(key);
|
||||
}).join(', ');
|
||||
|
||||
throw new Error("Unknown custom tag \"".concat(_tag, "\"; use one of ").concat(_keys));
|
||||
}
|
||||
|
||||
tags[i] = tagObj;
|
||||
}
|
||||
}
|
||||
|
||||
return tags;
|
||||
}
|
||||
|
||||
var sortMapEntriesByKey = function sortMapEntriesByKey(a, b) {
|
||||
return a.key < b.key ? -1 : a.key > b.key ? 1 : 0;
|
||||
};
|
||||
|
||||
var Schema = /*#__PURE__*/function () {
|
||||
// TODO: remove in v2
|
||||
// TODO: remove in v2
|
||||
function Schema(_ref) {
|
||||
var customTags = _ref.customTags,
|
||||
merge = _ref.merge,
|
||||
schema = _ref.schema,
|
||||
sortMapEntries = _ref.sortMapEntries,
|
||||
deprecatedCustomTags = _ref.tags;
|
||||
|
||||
_classCallCheck(this, Schema);
|
||||
|
||||
this.merge = !!merge;
|
||||
this.name = schema;
|
||||
this.sortMapEntries = sortMapEntries === true ? sortMapEntriesByKey : sortMapEntries || null;
|
||||
if (!customTags && deprecatedCustomTags) warnOptionDeprecation('tags', 'customTags');
|
||||
this.tags = getSchemaTags(schemas, tags, customTags || deprecatedCustomTags, schema);
|
||||
}
|
||||
|
||||
_createClass(Schema, [{
|
||||
key: "createNode",
|
||||
value: function createNode$1(value, wrapScalars, tagName, ctx) {
|
||||
var baseCtx = {
|
||||
defaultPrefix: Schema.defaultPrefix,
|
||||
schema: this,
|
||||
wrapScalars: wrapScalars
|
||||
};
|
||||
var createCtx = ctx ? Object.assign(ctx, baseCtx) : baseCtx;
|
||||
return createNode(value, tagName, createCtx);
|
||||
}
|
||||
}, {
|
||||
key: "createPair",
|
||||
value: function createPair(key, value, ctx) {
|
||||
if (!ctx) ctx = {
|
||||
wrapScalars: true
|
||||
};
|
||||
var k = this.createNode(key, ctx.wrapScalars, null, ctx);
|
||||
var v = this.createNode(value, ctx.wrapScalars, null, ctx);
|
||||
return new Pair(k, v);
|
||||
}
|
||||
}]);
|
||||
|
||||
return Schema;
|
||||
}();
|
||||
|
||||
_defineProperty(Schema, "defaultPrefix", defaultTagPrefix);
|
||||
|
||||
_defineProperty(Schema, "defaultTags", defaultTags);
|
||||
|
||||
export { Schema as S };
|
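The `getSchemaTags` helper above is where the public `customTags` option ends up: string entries are looked up in the `tags` map (e.g. `'timestamp'`, `'binary'`), tag objects are concatenated onto the schema, and a function form receives the default tag list. A small sketch of how that surfaces to callers, assuming the documented yaml 1.10 option (the sample scalars are illustrative):

```js
const YAML = require('yaml')

// Default 1.2 core schema: no timestamp tag, so the scalar stays a string.
YAML.parse('at: 2001-12-15 02:59:43')
// → { at: '2001-12-15 02:59:43' }

// Passing a known tag name routes through getSchemaTags/knownTags above.
YAML.parse('at: 2001-12-15 02:59:43', { customTags: ['timestamp'] })
// → { at: Date }  (resolved by the timestamp tag)

// The function form lets you filter or reorder the default tags,
// here dropping the octal integer representation.
YAML.parse('n: 0o17', { customTags: tags => tags.filter(t => t.format !== 'OCT') })
// → { n: '0o17' }
```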
1004  node_modules/yaml/browser/dist/index.js  generated  vendored  Normal file
File diff suppressed because it is too large
3  node_modules/yaml/browser/dist/legacy-exports.js  generated  vendored  Normal file
@@ -0,0 +1,3 @@
import './PlainValue-ff5147c6.js';
import './resolveSeq-04825f30.js';
export { b as binary, f as floatTime, i as intTime, o as omap, p as pairs, s as set, t as timestamp, c as warnFileDeprecation } from './warnings-0e4b70d3.js';
1904  node_modules/yaml/browser/dist/parse-cst.js  generated  vendored  Normal file
File diff suppressed because it is too large
2373  node_modules/yaml/browser/dist/resolveSeq-04825f30.js  generated  vendored  Normal file
File diff suppressed because it is too large
4  node_modules/yaml/browser/dist/types.js  generated  vendored  Normal file
@@ -0,0 +1,4 @@
import './PlainValue-ff5147c6.js';
export { A as Alias, C as Collection, M as Merge, N as Node, P as Pair, S as Scalar, d as YAMLMap, Y as YAMLSeq, b as binaryOptions, a as boolOptions, i as intOptions, n as nullOptions, s as strOptions } from './resolveSeq-04825f30.js';
export { S as Schema } from './Schema-2bf2c74e.js';
import './warnings-0e4b70d3.js';
2  node_modules/yaml/browser/dist/util.js  generated  vendored  Normal file
@@ -0,0 +1,2 @@
export { T as Type, i as YAMLError, o as YAMLReferenceError, g as YAMLSemanticError, Y as YAMLSyntaxError, f as YAMLWarning } from './PlainValue-ff5147c6.js';
export { l as findPair, g as parseMap, h as parseSeq, k as stringifyNumber, c as stringifyString, t as toJSON } from './resolveSeq-04825f30.js';
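The error classes re-exported here are the same ones that end up on a document's `errors` array. A short sketch, assuming the `yaml/util` endpoint of yaml 1.10 mirrors this browser build (the malformed input is only an example):

```js
const YAML = require('yaml')
const { YAMLError } = require('yaml/util')

const doc = YAML.parseDocument('key: [unclosed')
for (const err of doc.errors) {
  // Parse problems are collected rather than thrown; each is a YAMLError subclass.
  console.warn(err instanceof YAMLError, err.message)
}
```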
499  node_modules/yaml/browser/dist/warnings-0e4b70d3.js  generated  vendored  Normal file
@@ -0,0 +1,499 @@
import { o as YAMLReferenceError, T as Type, g as YAMLSemanticError, _ as _createForOfIteratorHelper, e as _defineProperty, j as _inherits, k as _createSuper, c as _classCallCheck, p as _assertThisInitialized, b as _createClass, a as _typeof, l as _get, m as _getPrototypeOf } from './PlainValue-ff5147c6.js';
|
||||
import { j as resolveString, b as binaryOptions, c as stringifyString, h as resolveSeq, P as Pair, d as YAMLMap, Y as YAMLSeq, t as toJSON, S as Scalar, l as findPair, g as resolveMap, k as stringifyNumber } from './resolveSeq-04825f30.js';
|
||||
|
||||
/* global atob, btoa, Buffer */
|
||||
var binary = {
|
||||
identify: function identify(value) {
|
||||
return value instanceof Uint8Array;
|
||||
},
|
||||
// Buffer inherits from Uint8Array
|
||||
default: false,
|
||||
tag: 'tag:yaml.org,2002:binary',
|
||||
|
||||
/**
|
||||
* Returns a Buffer in node and an Uint8Array in browsers
|
||||
*
|
||||
* To use the resulting buffer as an image, you'll want to do something like:
|
||||
*
|
||||
* const blob = new Blob([buffer], { type: 'image/jpeg' })
|
||||
* document.querySelector('#photo').src = URL.createObjectURL(blob)
|
||||
*/
|
||||
resolve: function resolve(doc, node) {
|
||||
var src = resolveString(doc, node);
|
||||
|
||||
if (typeof Buffer === 'function') {
|
||||
return Buffer.from(src, 'base64');
|
||||
} else if (typeof atob === 'function') {
|
||||
// On IE 11, atob() can't handle newlines
|
||||
var str = atob(src.replace(/[\n\r]/g, ''));
|
||||
var buffer = new Uint8Array(str.length);
|
||||
|
||||
for (var i = 0; i < str.length; ++i) {
|
||||
buffer[i] = str.charCodeAt(i);
|
||||
}
|
||||
|
||||
return buffer;
|
||||
} else {
|
||||
var msg = 'This environment does not support reading binary tags; either Buffer or atob is required';
|
||||
doc.errors.push(new YAMLReferenceError(node, msg));
|
||||
return null;
|
||||
}
|
||||
},
|
||||
options: binaryOptions,
|
||||
stringify: function stringify(_ref, ctx, onComment, onChompKeep) {
|
||||
var comment = _ref.comment,
|
||||
type = _ref.type,
|
||||
value = _ref.value;
|
||||
var src;
|
||||
|
||||
if (typeof Buffer === 'function') {
|
||||
src = value instanceof Buffer ? value.toString('base64') : Buffer.from(value.buffer).toString('base64');
|
||||
} else if (typeof btoa === 'function') {
|
||||
var s = '';
|
||||
|
||||
for (var i = 0; i < value.length; ++i) {
|
||||
s += String.fromCharCode(value[i]);
|
||||
}
|
||||
|
||||
src = btoa(s);
|
||||
} else {
|
||||
throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required');
|
||||
}
|
||||
|
||||
if (!type) type = binaryOptions.defaultType;
|
||||
|
||||
if (type === Type.QUOTE_DOUBLE) {
|
||||
value = src;
|
||||
} else {
|
||||
var lineWidth = binaryOptions.lineWidth;
|
||||
var n = Math.ceil(src.length / lineWidth);
|
||||
var lines = new Array(n);
|
||||
|
||||
for (var _i = 0, o = 0; _i < n; ++_i, o += lineWidth) {
|
||||
lines[_i] = src.substr(o, lineWidth);
|
||||
}
|
||||
|
||||
value = lines.join(type === Type.BLOCK_LITERAL ? '\n' : ' ');
|
||||
}
|
||||
|
||||
return stringifyString({
|
||||
comment: comment,
|
||||
type: type,
|
||||
value: value
|
||||
}, ctx, onComment, onChompKeep);
|
||||
}
|
||||
};
|
||||
|
||||
function parsePairs(doc, cst) {
|
||||
var seq = resolveSeq(doc, cst);
|
||||
|
||||
for (var i = 0; i < seq.items.length; ++i) {
|
||||
var item = seq.items[i];
|
||||
if (item instanceof Pair) continue;else if (item instanceof YAMLMap) {
|
||||
if (item.items.length > 1) {
|
||||
var msg = 'Each pair must have its own sequence indicator';
|
||||
throw new YAMLSemanticError(cst, msg);
|
||||
}
|
||||
|
||||
var pair = item.items[0] || new Pair();
|
||||
if (item.commentBefore) pair.commentBefore = pair.commentBefore ? "".concat(item.commentBefore, "\n").concat(pair.commentBefore) : item.commentBefore;
|
||||
if (item.comment) pair.comment = pair.comment ? "".concat(item.comment, "\n").concat(pair.comment) : item.comment;
|
||||
item = pair;
|
||||
}
|
||||
seq.items[i] = item instanceof Pair ? item : new Pair(item);
|
||||
}
|
||||
|
||||
return seq;
|
||||
}
|
||||
function createPairs(schema, iterable, ctx) {
|
||||
var pairs = new YAMLSeq(schema);
|
||||
pairs.tag = 'tag:yaml.org,2002:pairs';
|
||||
|
||||
var _iterator = _createForOfIteratorHelper(iterable),
|
||||
_step;
|
||||
|
||||
try {
|
||||
for (_iterator.s(); !(_step = _iterator.n()).done;) {
|
||||
var it = _step.value;
|
||||
var key = void 0,
|
||||
value = void 0;
|
||||
|
||||
if (Array.isArray(it)) {
|
||||
if (it.length === 2) {
|
||||
key = it[0];
|
||||
value = it[1];
|
||||
} else throw new TypeError("Expected [key, value] tuple: ".concat(it));
|
||||
} else if (it && it instanceof Object) {
|
||||
var keys = Object.keys(it);
|
||||
|
||||
if (keys.length === 1) {
|
||||
key = keys[0];
|
||||
value = it[key];
|
||||
} else throw new TypeError("Expected { key: value } tuple: ".concat(it));
|
||||
} else {
|
||||
key = it;
|
||||
}
|
||||
|
||||
var pair = schema.createPair(key, value, ctx);
|
||||
pairs.items.push(pair);
|
||||
}
|
||||
} catch (err) {
|
||||
_iterator.e(err);
|
||||
} finally {
|
||||
_iterator.f();
|
||||
}
|
||||
|
||||
return pairs;
|
||||
}
|
||||
var pairs = {
|
||||
default: false,
|
||||
tag: 'tag:yaml.org,2002:pairs',
|
||||
resolve: parsePairs,
|
||||
createNode: createPairs
|
||||
};
|
||||
|
||||
var YAMLOMap = /*#__PURE__*/function (_YAMLSeq) {
|
||||
_inherits(YAMLOMap, _YAMLSeq);
|
||||
|
||||
var _super = _createSuper(YAMLOMap);
|
||||
|
||||
function YAMLOMap() {
|
||||
var _this;
|
||||
|
||||
_classCallCheck(this, YAMLOMap);
|
||||
|
||||
_this = _super.call(this);
|
||||
|
||||
_defineProperty(_assertThisInitialized(_this), "add", YAMLMap.prototype.add.bind(_assertThisInitialized(_this)));
|
||||
|
||||
_defineProperty(_assertThisInitialized(_this), "delete", YAMLMap.prototype.delete.bind(_assertThisInitialized(_this)));
|
||||
|
||||
_defineProperty(_assertThisInitialized(_this), "get", YAMLMap.prototype.get.bind(_assertThisInitialized(_this)));
|
||||
|
||||
_defineProperty(_assertThisInitialized(_this), "has", YAMLMap.prototype.has.bind(_assertThisInitialized(_this)));
|
||||
|
||||
_defineProperty(_assertThisInitialized(_this), "set", YAMLMap.prototype.set.bind(_assertThisInitialized(_this)));
|
||||
|
||||
_this.tag = YAMLOMap.tag;
|
||||
return _this;
|
||||
}
|
||||
|
||||
_createClass(YAMLOMap, [{
|
||||
key: "toJSON",
|
||||
value: function toJSON$1(_, ctx) {
|
||||
var map = new Map();
|
||||
if (ctx && ctx.onCreate) ctx.onCreate(map);
|
||||
|
||||
var _iterator = _createForOfIteratorHelper(this.items),
|
||||
_step;
|
||||
|
||||
try {
|
||||
for (_iterator.s(); !(_step = _iterator.n()).done;) {
|
||||
var pair = _step.value;
|
||||
var key = void 0,
|
||||
value = void 0;
|
||||
|
||||
if (pair instanceof Pair) {
|
||||
key = toJSON(pair.key, '', ctx);
|
||||
value = toJSON(pair.value, key, ctx);
|
||||
} else {
|
||||
key = toJSON(pair, '', ctx);
|
||||
}
|
||||
|
||||
if (map.has(key)) throw new Error('Ordered maps must not include duplicate keys');
|
||||
map.set(key, value);
|
||||
}
|
||||
} catch (err) {
|
||||
_iterator.e(err);
|
||||
} finally {
|
||||
_iterator.f();
|
||||
}
|
||||
|
||||
return map;
|
||||
}
|
||||
}]);
|
||||
|
||||
return YAMLOMap;
|
||||
}(YAMLSeq);
|
||||
|
||||
_defineProperty(YAMLOMap, "tag", 'tag:yaml.org,2002:omap');
|
||||
|
||||
function parseOMap(doc, cst) {
|
||||
var pairs = parsePairs(doc, cst);
|
||||
var seenKeys = [];
|
||||
|
||||
var _iterator2 = _createForOfIteratorHelper(pairs.items),
|
||||
_step2;
|
||||
|
||||
try {
|
||||
for (_iterator2.s(); !(_step2 = _iterator2.n()).done;) {
|
||||
var key = _step2.value.key;
|
||||
|
||||
if (key instanceof Scalar) {
|
||||
if (seenKeys.includes(key.value)) {
|
||||
var msg = 'Ordered maps must not include duplicate keys';
|
||||
throw new YAMLSemanticError(cst, msg);
|
||||
} else {
|
||||
seenKeys.push(key.value);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
_iterator2.e(err);
|
||||
} finally {
|
||||
_iterator2.f();
|
||||
}
|
||||
|
||||
return Object.assign(new YAMLOMap(), pairs);
|
||||
}
|
||||
|
||||
function createOMap(schema, iterable, ctx) {
|
||||
var pairs = createPairs(schema, iterable, ctx);
|
||||
var omap = new YAMLOMap();
|
||||
omap.items = pairs.items;
|
||||
return omap;
|
||||
}
|
||||
|
||||
var omap = {
|
||||
identify: function identify(value) {
|
||||
return value instanceof Map;
|
||||
},
|
||||
nodeClass: YAMLOMap,
|
||||
default: false,
|
||||
tag: 'tag:yaml.org,2002:omap',
|
||||
resolve: parseOMap,
|
||||
createNode: createOMap
|
||||
};
|
||||
|
||||
var YAMLSet = /*#__PURE__*/function (_YAMLMap) {
|
||||
_inherits(YAMLSet, _YAMLMap);
|
||||
|
||||
var _super = _createSuper(YAMLSet);
|
||||
|
||||
function YAMLSet() {
|
||||
var _this;
|
||||
|
||||
_classCallCheck(this, YAMLSet);
|
||||
|
||||
_this = _super.call(this);
|
||||
_this.tag = YAMLSet.tag;
|
||||
return _this;
|
||||
}
|
||||
|
||||
_createClass(YAMLSet, [{
|
||||
key: "add",
|
||||
value: function add(key) {
|
||||
var pair = key instanceof Pair ? key : new Pair(key);
|
||||
var prev = findPair(this.items, pair.key);
|
||||
if (!prev) this.items.push(pair);
|
||||
}
|
||||
}, {
|
||||
key: "get",
|
||||
value: function get(key, keepPair) {
|
||||
var pair = findPair(this.items, key);
|
||||
return !keepPair && pair instanceof Pair ? pair.key instanceof Scalar ? pair.key.value : pair.key : pair;
|
||||
}
|
||||
}, {
|
||||
key: "set",
|
||||
value: function set(key, value) {
|
||||
if (typeof value !== 'boolean') throw new Error("Expected boolean value for set(key, value) in a YAML set, not ".concat(_typeof(value)));
|
||||
var prev = findPair(this.items, key);
|
||||
|
||||
if (prev && !value) {
|
||||
this.items.splice(this.items.indexOf(prev), 1);
|
||||
} else if (!prev && value) {
|
||||
this.items.push(new Pair(key));
|
||||
}
|
||||
}
|
||||
}, {
|
||||
key: "toJSON",
|
||||
value: function toJSON(_, ctx) {
|
||||
return _get(_getPrototypeOf(YAMLSet.prototype), "toJSON", this).call(this, _, ctx, Set);
|
||||
}
|
||||
}, {
|
||||
key: "toString",
|
||||
value: function toString(ctx, onComment, onChompKeep) {
|
||||
if (!ctx) return JSON.stringify(this);
|
||||
if (this.hasAllNullValues()) return _get(_getPrototypeOf(YAMLSet.prototype), "toString", this).call(this, ctx, onComment, onChompKeep);else throw new Error('Set items must all have null values');
|
||||
}
|
||||
}]);
|
||||
|
||||
return YAMLSet;
|
||||
}(YAMLMap);
|
||||
|
||||
_defineProperty(YAMLSet, "tag", 'tag:yaml.org,2002:set');
|
||||
|
||||
function parseSet(doc, cst) {
|
||||
var map = resolveMap(doc, cst);
|
||||
if (!map.hasAllNullValues()) throw new YAMLSemanticError(cst, 'Set items must all have null values');
|
||||
return Object.assign(new YAMLSet(), map);
|
||||
}
|
||||
|
||||
function createSet(schema, iterable, ctx) {
|
||||
var set = new YAMLSet();
|
||||
|
||||
var _iterator = _createForOfIteratorHelper(iterable),
|
||||
_step;
|
||||
|
||||
try {
|
||||
for (_iterator.s(); !(_step = _iterator.n()).done;) {
|
||||
var value = _step.value;
|
||||
set.items.push(schema.createPair(value, null, ctx));
|
||||
}
|
||||
} catch (err) {
|
||||
_iterator.e(err);
|
||||
} finally {
|
||||
_iterator.f();
|
||||
}
|
||||
|
||||
return set;
|
||||
}
|
||||
|
||||
var set = {
|
||||
identify: function identify(value) {
|
||||
return value instanceof Set;
|
||||
},
|
||||
nodeClass: YAMLSet,
|
||||
default: false,
|
||||
tag: 'tag:yaml.org,2002:set',
|
||||
resolve: parseSet,
|
||||
createNode: createSet
|
||||
};
|
||||
|
||||
var parseSexagesimal = function parseSexagesimal(sign, parts) {
|
||||
var n = parts.split(':').reduce(function (n, p) {
|
||||
return n * 60 + Number(p);
|
||||
}, 0);
|
||||
return sign === '-' ? -n : n;
|
||||
}; // hhhh:mm:ss.sss
|
||||
|
||||
|
||||
var stringifySexagesimal = function stringifySexagesimal(_ref) {
|
||||
var value = _ref.value;
|
||||
if (isNaN(value) || !isFinite(value)) return stringifyNumber(value);
|
||||
var sign = '';
|
||||
|
||||
if (value < 0) {
|
||||
sign = '-';
|
||||
value = Math.abs(value);
|
||||
}
|
||||
|
||||
var parts = [value % 60]; // seconds, including ms
|
||||
|
||||
if (value < 60) {
|
||||
parts.unshift(0); // at least one : is required
|
||||
} else {
|
||||
value = Math.round((value - parts[0]) / 60);
|
||||
parts.unshift(value % 60); // minutes
|
||||
|
||||
if (value >= 60) {
|
||||
value = Math.round((value - parts[0]) / 60);
|
||||
parts.unshift(value); // hours
|
||||
}
|
||||
}
|
||||
|
||||
return sign + parts.map(function (n) {
|
||||
return n < 10 ? '0' + String(n) : String(n);
|
||||
}).join(':').replace(/000000\d*$/, '') // % 60 may introduce error
|
||||
;
|
||||
};
|
||||
|
||||
var intTime = {
|
||||
identify: function identify(value) {
|
||||
return typeof value === 'number';
|
||||
},
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
format: 'TIME',
|
||||
test: /^([-+]?)([0-9][0-9_]*(?::[0-5]?[0-9])+)$/,
|
||||
resolve: function resolve(str, sign, parts) {
|
||||
return parseSexagesimal(sign, parts.replace(/_/g, ''));
|
||||
},
|
||||
stringify: stringifySexagesimal
|
||||
};
|
||||
var floatTime = {
|
||||
identify: function identify(value) {
|
||||
return typeof value === 'number';
|
||||
},
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:float',
|
||||
format: 'TIME',
|
||||
test: /^([-+]?)([0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*)$/,
|
||||
resolve: function resolve(str, sign, parts) {
|
||||
return parseSexagesimal(sign, parts.replace(/_/g, ''));
|
||||
},
|
||||
stringify: stringifySexagesimal
|
||||
};
|
||||
var timestamp = {
|
||||
identify: function identify(value) {
|
||||
return value instanceof Date;
|
||||
},
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:timestamp',
|
||||
// If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part
|
||||
// may be omitted altogether, resulting in a date format. In such a case, the time part is
|
||||
// assumed to be 00:00:00Z (start of day, UTC).
|
||||
test: RegExp('^(?:' + '([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd
|
||||
'(?:(?:t|T|[ \\t]+)' + // t | T | whitespace
|
||||
'([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)?
|
||||
'(?:[ \\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30
|
||||
')?' + ')$'),
|
||||
resolve: function resolve(str, year, month, day, hour, minute, second, millisec, tz) {
|
||||
if (millisec) millisec = (millisec + '00').substr(1, 3);
|
||||
var date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec || 0);
|
||||
|
||||
if (tz && tz !== 'Z') {
|
||||
var d = parseSexagesimal(tz[0], tz.slice(1));
|
||||
if (Math.abs(d) < 30) d *= 60;
|
||||
date -= 60000 * d;
|
||||
}
|
||||
|
||||
return new Date(date);
|
||||
},
|
||||
stringify: function stringify(_ref2) {
|
||||
var value = _ref2.value;
|
||||
return value.toISOString().replace(/((T00:00)?:00)?\.000Z$/, '');
|
||||
}
|
||||
};
|
||||
|
||||
/* global console, process, YAML_SILENCE_DEPRECATION_WARNINGS, YAML_SILENCE_WARNINGS */
|
||||
function shouldWarn(deprecation) {
|
||||
var env = typeof process !== 'undefined' && process.env || {};
|
||||
|
||||
if (deprecation) {
|
||||
if (typeof YAML_SILENCE_DEPRECATION_WARNINGS !== 'undefined') return !YAML_SILENCE_DEPRECATION_WARNINGS;
|
||||
return !env.YAML_SILENCE_DEPRECATION_WARNINGS;
|
||||
}
|
||||
|
||||
if (typeof YAML_SILENCE_WARNINGS !== 'undefined') return !YAML_SILENCE_WARNINGS;
|
||||
return !env.YAML_SILENCE_WARNINGS;
|
||||
}
|
||||
|
||||
function warn(warning, type) {
|
||||
if (shouldWarn(false)) {
|
||||
var emit = typeof process !== 'undefined' && process.emitWarning; // This will throw in Jest if `warning` is an Error instance due to
|
||||
// https://github.com/facebook/jest/issues/2549
|
||||
|
||||
if (emit) emit(warning, type);else {
|
||||
// eslint-disable-next-line no-console
|
||||
console.warn(type ? "".concat(type, ": ").concat(warning) : warning);
|
||||
}
|
||||
}
|
||||
}
|
||||
function warnFileDeprecation(filename) {
|
||||
if (shouldWarn(true)) {
|
||||
var path = filename.replace(/.*yaml[/\\]/i, '').replace(/\.js$/, '').replace(/\\/g, '/');
|
||||
warn("The endpoint 'yaml/".concat(path, "' will be removed in a future release."), 'DeprecationWarning');
|
||||
}
|
||||
}
|
||||
var warned = {};
|
||||
function warnOptionDeprecation(name, alternative) {
|
||||
if (!warned[name] && shouldWarn(true)) {
|
||||
warned[name] = true;
|
||||
var msg = "The option '".concat(name, "' will be removed in a future release");
|
||||
msg += alternative ? ", use '".concat(alternative, "' instead.") : '.';
|
||||
warn(msg, 'DeprecationWarning');
|
||||
}
|
||||
}
|
||||
|
||||
export { warnOptionDeprecation as a, binary as b, warnFileDeprecation as c, floatTime as f, intTime as i, omap as o, pairs as p, set as s, timestamp as t, warn as w };
|
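The tags defined in this file (binary, omap, pairs, set, sexagesimal int/float times, timestamp) are the extra pieces of the `yaml-1.1` schema; `getSchemaTags` in the Schema module above maps the `'yaml-1.1'` option value to this `yaml11` tag set. A brief hedged sketch of how that surfaces, with an assumed sample document:

```js
const YAML = require('yaml')

const src = [
  'canonical: 2001-12-15T02:59:43.1Z',
  'picture: !!binary "R0lGODlhAQABAAAAACw="',
  'duration: 1:30:00'
].join('\n')

const data = YAML.parse(src, { schema: 'yaml-1.1' })
data.canonical instanceof Date  // → true (timestamp tag)
data.picture                    // → Buffer in Node, Uint8Array in browsers (binary resolve() above)
data.duration                   // → 5400, via the sexagesimal int resolver above
```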
1  node_modules/yaml/browser/index.js  generated  vendored  Normal file
@@ -0,0 +1 @@
module.exports = require('./dist').YAML
2  node_modules/yaml/browser/map.js  generated  vendored  Normal file
@@ -0,0 +1,2 @@
module.exports = require('./dist/types').YAMLMap
require('./dist/legacy-exports').warnFileDeprecation(__filename)
2  node_modules/yaml/browser/pair.js  generated  vendored  Normal file
@@ -0,0 +1,2 @@
module.exports = require('./dist/types').Pair
require('./dist/legacy-exports').warnFileDeprecation(__filename)
1  node_modules/yaml/browser/parse-cst.js  generated  vendored  Normal file
@@ -0,0 +1 @@
module.exports = require('./dist/parse-cst').parse
2  node_modules/yaml/browser/scalar.js  generated  vendored  Normal file
@@ -0,0 +1,2 @@
module.exports = require('./dist/types').Scalar
require('./dist/legacy-exports').warnFileDeprecation(__filename)
9  node_modules/yaml/browser/schema.js  generated  vendored  Normal file
@@ -0,0 +1,9 @@
const types = require('./dist/types')
const util = require('./dist/util')

module.exports = types.Schema
module.exports.nullOptions = types.nullOptions
module.exports.strOptions = types.strOptions
module.exports.stringify = util.stringifyString

require('./dist/legacy-exports').warnFileDeprecation(__filename)
2  node_modules/yaml/browser/seq.js  generated  vendored  Normal file
@@ -0,0 +1,2 @@
module.exports = require('./dist/types').YAMLSeq
require('./dist/legacy-exports').warnFileDeprecation(__filename)
1  node_modules/yaml/browser/types.js  generated  vendored  Normal file
@@ -0,0 +1 @@
export * from './dist/types.js'
8  node_modules/yaml/browser/types/binary.js  generated  vendored  Normal file
@@ -0,0 +1,8 @@
'use strict'
Object.defineProperty(exports, '__esModule', { value: true })

const legacy = require('../dist/legacy-exports')
exports.binary = legacy.binary
exports.default = [exports.binary]

legacy.warnFileDeprecation(__filename)
3  node_modules/yaml/browser/types/omap.js  generated  vendored  Normal file
@@ -0,0 +1,3 @@
const legacy = require('../dist/legacy-exports')
module.exports = legacy.omap
legacy.warnFileDeprecation(__filename)
3  node_modules/yaml/browser/types/pairs.js  generated  vendored  Normal file
@@ -0,0 +1,3 @@
const legacy = require('../dist/legacy-exports')
module.exports = legacy.pairs
legacy.warnFileDeprecation(__filename)
3  node_modules/yaml/browser/types/set.js  generated  vendored  Normal file
@@ -0,0 +1,3 @@
const legacy = require('../dist/legacy-exports')
module.exports = legacy.set
legacy.warnFileDeprecation(__filename)
10  node_modules/yaml/browser/types/timestamp.js  generated  vendored  Normal file
@@ -0,0 +1,10 @@
'use strict'
Object.defineProperty(exports, '__esModule', { value: true })

const legacy = require('../dist/legacy-exports')
exports.default = [legacy.intTime, legacy.floatTime, legacy.timestamp]
exports.floatTime = legacy.floatTime
exports.intTime = legacy.intTime
exports.timestamp = legacy.timestamp

legacy.warnFileDeprecation(__filename)
1  node_modules/yaml/browser/util.js  generated  vendored  Normal file
@@ -0,0 +1 @@
export * from './dist/util.js'
757  node_modules/yaml/dist/Document-2cf6b08c.js  generated  vendored  Normal file
@@ -0,0 +1,757 @@
'use strict';
|
||||
|
||||
var PlainValue = require('./PlainValue-ec8e588e.js');
|
||||
var resolveSeq = require('./resolveSeq-4a68b39b.js');
|
||||
var Schema = require('./Schema-42e9705c.js');
|
||||
|
||||
const defaultOptions = {
|
||||
anchorPrefix: 'a',
|
||||
customTags: null,
|
||||
indent: 2,
|
||||
indentSeq: true,
|
||||
keepCstNodes: false,
|
||||
keepNodeTypes: true,
|
||||
keepBlobsInJSON: true,
|
||||
mapAsMap: false,
|
||||
maxAliasCount: 100,
|
||||
prettyErrors: false,
|
||||
// TODO Set true in v2
|
||||
simpleKeys: false,
|
||||
version: '1.2'
|
||||
};
|
||||
const scalarOptions = {
|
||||
get binary() {
|
||||
return resolveSeq.binaryOptions;
|
||||
},
|
||||
|
||||
set binary(opt) {
|
||||
Object.assign(resolveSeq.binaryOptions, opt);
|
||||
},
|
||||
|
||||
get bool() {
|
||||
return resolveSeq.boolOptions;
|
||||
},
|
||||
|
||||
set bool(opt) {
|
||||
Object.assign(resolveSeq.boolOptions, opt);
|
||||
},
|
||||
|
||||
get int() {
|
||||
return resolveSeq.intOptions;
|
||||
},
|
||||
|
||||
set int(opt) {
|
||||
Object.assign(resolveSeq.intOptions, opt);
|
||||
},
|
||||
|
||||
get null() {
|
||||
return resolveSeq.nullOptions;
|
||||
},
|
||||
|
||||
set null(opt) {
|
||||
Object.assign(resolveSeq.nullOptions, opt);
|
||||
},
|
||||
|
||||
get str() {
|
||||
return resolveSeq.strOptions;
|
||||
},
|
||||
|
||||
set str(opt) {
|
||||
Object.assign(resolveSeq.strOptions, opt);
|
||||
}
|
||||
|
||||
};
|
||||
const documentOptions = {
|
||||
'1.0': {
|
||||
schema: 'yaml-1.1',
|
||||
merge: true,
|
||||
tagPrefixes: [{
|
||||
handle: '!',
|
||||
prefix: PlainValue.defaultTagPrefix
|
||||
}, {
|
||||
handle: '!!',
|
||||
prefix: 'tag:private.yaml.org,2002:'
|
||||
}]
|
||||
},
|
||||
'1.1': {
|
||||
schema: 'yaml-1.1',
|
||||
merge: true,
|
||||
tagPrefixes: [{
|
||||
handle: '!',
|
||||
prefix: '!'
|
||||
}, {
|
||||
handle: '!!',
|
||||
prefix: PlainValue.defaultTagPrefix
|
||||
}]
|
||||
},
|
||||
'1.2': {
|
||||
schema: 'core',
|
||||
merge: false,
|
||||
tagPrefixes: [{
|
||||
handle: '!',
|
||||
prefix: '!'
|
||||
}, {
|
||||
handle: '!!',
|
||||
prefix: PlainValue.defaultTagPrefix
|
||||
}]
|
||||
}
|
||||
};
|
||||
|
||||
function stringifyTag(doc, tag) {
|
||||
if ((doc.version || doc.options.version) === '1.0') {
|
||||
const priv = tag.match(/^tag:private\.yaml\.org,2002:([^:/]+)$/);
|
||||
if (priv) return '!' + priv[1];
|
||||
const vocab = tag.match(/^tag:([a-zA-Z0-9-]+)\.yaml\.org,2002:(.*)/);
|
||||
return vocab ? `!${vocab[1]}/${vocab[2]}` : `!${tag.replace(/^tag:/, '')}`;
|
||||
}
|
||||
|
||||
let p = doc.tagPrefixes.find(p => tag.indexOf(p.prefix) === 0);
|
||||
|
||||
if (!p) {
|
||||
const dtp = doc.getDefaults().tagPrefixes;
|
||||
p = dtp && dtp.find(p => tag.indexOf(p.prefix) === 0);
|
||||
}
|
||||
|
||||
if (!p) return tag[0] === '!' ? tag : `!<${tag}>`;
|
||||
const suffix = tag.substr(p.prefix.length).replace(/[!,[\]{}]/g, ch => ({
|
||||
'!': '%21',
|
||||
',': '%2C',
|
||||
'[': '%5B',
|
||||
']': '%5D',
|
||||
'{': '%7B',
|
||||
'}': '%7D'
|
||||
})[ch]);
|
||||
return p.handle + suffix;
|
||||
}
|
||||
|
||||
function getTagObject(tags, item) {
  if (item instanceof resolveSeq.Alias) return resolveSeq.Alias;

  if (item.tag) {
    const match = tags.filter(t => t.tag === item.tag);
    if (match.length > 0) return match.find(t => t.format === item.format) || match[0];
  }

  let tagObj, obj;

  if (item instanceof resolveSeq.Scalar) {
    obj = item.value; // TODO: deprecate/remove class check

    const match = tags.filter(t => t.identify && t.identify(obj) || t.class && obj instanceof t.class);
    tagObj = match.find(t => t.format === item.format) || match.find(t => !t.format);
  } else {
    obj = item;
    tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass);
  }

  if (!tagObj) {
    const name = obj && obj.constructor ? obj.constructor.name : typeof obj;
    throw new Error(`Tag not resolved for ${name} value`);
  }

  return tagObj;
} // needs to be called before value stringifier to allow for circular anchor refs
function stringifyProps(node, tagObj, {
  anchors,
  doc
}) {
  const props = [];
  const anchor = doc.anchors.getName(node);

  if (anchor) {
    anchors[anchor] = node;
    props.push(`&${anchor}`);
  }

  if (node.tag) {
    props.push(stringifyTag(doc, node.tag));
  } else if (!tagObj.default) {
    props.push(stringifyTag(doc, tagObj.tag));
  }

  return props.join(' ');
}
function stringify(item, ctx, onComment, onChompKeep) {
  const {
    anchors,
    schema
  } = ctx.doc;
  let tagObj;

  if (!(item instanceof resolveSeq.Node)) {
    const createCtx = {
      aliasNodes: [],
      onTagObj: o => tagObj = o,
      prevObjects: new Map()
    };
    item = schema.createNode(item, true, null, createCtx);

    for (const alias of createCtx.aliasNodes) {
      alias.source = alias.source.node;
      let name = anchors.getName(alias.source);

      if (!name) {
        name = anchors.newName();
        anchors.map[name] = alias.source;
      }
    }
  }

  if (item instanceof resolveSeq.Pair) return item.toString(ctx, onComment, onChompKeep);
  if (!tagObj) tagObj = getTagObject(schema.tags, item);
  const props = stringifyProps(item, tagObj, ctx);
  if (props.length > 0) ctx.indentAtStart = (ctx.indentAtStart || 0) + props.length + 1;
  const str = typeof tagObj.stringify === 'function' ? tagObj.stringify(item, ctx, onComment, onChompKeep) : item instanceof resolveSeq.Scalar ? resolveSeq.stringifyString(item, ctx, onComment, onChompKeep) : item.toString(ctx, onComment, onChompKeep);
  if (!props) return str;
  return item instanceof resolveSeq.Scalar || str[0] === '{' || str[0] === '[' ? `${props} ${str}` : `${props}\n${ctx.indent}${str}`;
}
class Anchors {
  static validAnchorNode(node) {
    return node instanceof resolveSeq.Scalar || node instanceof resolveSeq.YAMLSeq || node instanceof resolveSeq.YAMLMap;
  }

  constructor(prefix) {
    PlainValue._defineProperty(this, "map", {});

    this.prefix = prefix;
  }

  createAlias(node, name) {
    this.setAnchor(node, name);
    return new resolveSeq.Alias(node);
  }

  createMergePair(...sources) {
    const merge = new resolveSeq.Merge();
    merge.value.items = sources.map(s => {
      if (s instanceof resolveSeq.Alias) {
        if (s.source instanceof resolveSeq.YAMLMap) return s;
      } else if (s instanceof resolveSeq.YAMLMap) {
        return this.createAlias(s);
      }

      throw new Error('Merge sources must be Map nodes or their Aliases');
    });
    return merge;
  }

  getName(node) {
    const {
      map
    } = this;
    return Object.keys(map).find(a => map[a] === node);
  }

  getNames() {
    return Object.keys(this.map);
  }

  getNode(name) {
    return this.map[name];
  }

  newName(prefix) {
    if (!prefix) prefix = this.prefix;
    const names = Object.keys(this.map);

    for (let i = 1; true; ++i) {
      const name = `${prefix}${i}`;
      if (!names.includes(name)) return name;
    }
  } // During parsing, map & aliases contain CST nodes


  resolveNodes() {
    const {
      map,
      _cstAliases
    } = this;
    Object.keys(map).forEach(a => {
      map[a] = map[a].resolved;
    });

    _cstAliases.forEach(a => {
      a.source = a.source.resolved;
    });

    delete this._cstAliases;
  }

  setAnchor(node, name) {
    if (node != null && !Anchors.validAnchorNode(node)) {
      throw new Error('Anchors may only be set for Scalar, Seq and Map nodes');
    }

    if (name && /[\x00-\x19\s,[\]{}]/.test(name)) {
      throw new Error('Anchor names must not contain whitespace or control characters');
    }

    const {
      map
    } = this;
    const prev = node && Object.keys(map).find(a => map[a] === node);

    if (prev) {
      if (!name) {
        return prev;
      } else if (prev !== name) {
        delete map[prev];
        map[name] = node;
      }
    } else {
      if (!name) {
        if (!node) return null;
        name = this.newName();
      }

      map[name] = node;
    }

    return name;
  }

}
const visit = (node, tags) => {
|
||||
if (node && typeof node === 'object') {
|
||||
const {
|
||||
tag
|
||||
} = node;
|
||||
|
||||
if (node instanceof resolveSeq.Collection) {
|
||||
if (tag) tags[tag] = true;
|
||||
node.items.forEach(n => visit(n, tags));
|
||||
} else if (node instanceof resolveSeq.Pair) {
|
||||
visit(node.key, tags);
|
||||
visit(node.value, tags);
|
||||
} else if (node instanceof resolveSeq.Scalar) {
|
||||
if (tag) tags[tag] = true;
|
||||
}
|
||||
}
|
||||
|
||||
return tags;
|
||||
};
|
||||
|
||||
const listTagNames = node => Object.keys(visit(node, {}));
|
||||
|
||||
function parseContents(doc, contents) {
|
||||
const comments = {
|
||||
before: [],
|
||||
after: []
|
||||
};
|
||||
let body = undefined;
|
||||
let spaceBefore = false;
|
||||
|
||||
for (const node of contents) {
|
||||
if (node.valueRange) {
|
||||
if (body !== undefined) {
|
||||
const msg = 'Document contains trailing content not separated by a ... or --- line';
|
||||
doc.errors.push(new PlainValue.YAMLSyntaxError(node, msg));
|
||||
break;
|
||||
}
|
||||
|
||||
const res = resolveSeq.resolveNode(doc, node);
|
||||
|
||||
if (spaceBefore) {
|
||||
res.spaceBefore = true;
|
||||
spaceBefore = false;
|
||||
}
|
||||
|
||||
body = res;
|
||||
} else if (node.comment !== null) {
|
||||
const cc = body === undefined ? comments.before : comments.after;
|
||||
cc.push(node.comment);
|
||||
} else if (node.type === PlainValue.Type.BLANK_LINE) {
|
||||
spaceBefore = true;
|
||||
|
||||
if (body === undefined && comments.before.length > 0 && !doc.commentBefore) {
|
||||
// space-separated comments at start are parsed as document comments
|
||||
doc.commentBefore = comments.before.join('\n');
|
||||
comments.before = [];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
doc.contents = body || null;
|
||||
|
||||
if (!body) {
|
||||
doc.comment = comments.before.concat(comments.after).join('\n') || null;
|
||||
} else {
|
||||
const cb = comments.before.join('\n');
|
||||
|
||||
if (cb) {
|
||||
const cbNode = body instanceof resolveSeq.Collection && body.items[0] ? body.items[0] : body;
|
||||
cbNode.commentBefore = cbNode.commentBefore ? `${cb}\n${cbNode.commentBefore}` : cb;
|
||||
}
|
||||
|
||||
doc.comment = comments.after.join('\n') || null;
|
||||
}
|
||||
}
|
||||
|
||||
function resolveTagDirective({
|
||||
tagPrefixes
|
||||
}, directive) {
|
||||
const [handle, prefix] = directive.parameters;
|
||||
|
||||
if (!handle || !prefix) {
|
||||
const msg = 'Insufficient parameters given for %TAG directive';
|
||||
throw new PlainValue.YAMLSemanticError(directive, msg);
|
||||
}
|
||||
|
||||
if (tagPrefixes.some(p => p.handle === handle)) {
|
||||
const msg = 'The %TAG directive must only be given at most once per handle in the same document.';
|
||||
throw new PlainValue.YAMLSemanticError(directive, msg);
|
||||
}
|
||||
|
||||
return {
|
||||
handle,
|
||||
prefix
|
||||
};
|
||||
}
|
||||
|
||||
function resolveYamlDirective(doc, directive) {
|
||||
let [version] = directive.parameters;
|
||||
if (directive.name === 'YAML:1.0') version = '1.0';
|
||||
|
||||
if (!version) {
|
||||
const msg = 'Insufficient parameters given for %YAML directive';
|
||||
throw new PlainValue.YAMLSemanticError(directive, msg);
|
||||
}
|
||||
|
||||
if (!documentOptions[version]) {
|
||||
const v0 = doc.version || doc.options.version;
|
||||
const msg = `Document will be parsed as YAML ${v0} rather than YAML ${version}`;
|
||||
doc.warnings.push(new PlainValue.YAMLWarning(directive, msg));
|
||||
}
|
||||
|
||||
return version;
|
||||
}
|
||||
|
||||
function parseDirectives(doc, directives, prevDoc) {
|
||||
const directiveComments = [];
|
||||
let hasDirectives = false;
|
||||
|
||||
for (const directive of directives) {
|
||||
const {
|
||||
comment,
|
||||
name
|
||||
} = directive;
|
||||
|
||||
switch (name) {
|
||||
case 'TAG':
|
||||
try {
|
||||
doc.tagPrefixes.push(resolveTagDirective(doc, directive));
|
||||
} catch (error) {
|
||||
doc.errors.push(error);
|
||||
}
|
||||
|
||||
hasDirectives = true;
|
||||
break;
|
||||
|
||||
case 'YAML':
|
||||
case 'YAML:1.0':
|
||||
if (doc.version) {
|
||||
const msg = 'The %YAML directive must only be given at most once per document.';
|
||||
doc.errors.push(new PlainValue.YAMLSemanticError(directive, msg));
|
||||
}
|
||||
|
||||
try {
|
||||
doc.version = resolveYamlDirective(doc, directive);
|
||||
} catch (error) {
|
||||
doc.errors.push(error);
|
||||
}
|
||||
|
||||
hasDirectives = true;
|
||||
break;
|
||||
|
||||
default:
|
||||
if (name) {
|
||||
const msg = `YAML only supports %TAG and %YAML directives, and not %${name}`;
|
||||
doc.warnings.push(new PlainValue.YAMLWarning(directive, msg));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if (comment) directiveComments.push(comment);
|
||||
}
|
||||
|
||||
if (prevDoc && !hasDirectives && '1.1' === (doc.version || prevDoc.version || doc.options.version)) {
|
||||
const copyTagPrefix = ({
|
||||
handle,
|
||||
prefix
|
||||
}) => ({
|
||||
handle,
|
||||
prefix
|
||||
});
|
||||
|
||||
doc.tagPrefixes = prevDoc.tagPrefixes.map(copyTagPrefix);
|
||||
doc.version = prevDoc.version;
|
||||
}
|
||||
|
||||
doc.commentBefore = directiveComments.join('\n') || null;
|
||||
}
|
||||
|
||||
function assertCollection(contents) {
|
||||
if (contents instanceof resolveSeq.Collection) return true;
|
||||
throw new Error('Expected a YAML collection as document contents');
|
||||
}
|
||||
|
||||
class Document {
|
||||
constructor(options) {
|
||||
this.anchors = new Anchors(options.anchorPrefix);
|
||||
this.commentBefore = null;
|
||||
this.comment = null;
|
||||
this.contents = null;
|
||||
this.directivesEndMarker = null;
|
||||
this.errors = [];
|
||||
this.options = options;
|
||||
this.schema = null;
|
||||
this.tagPrefixes = [];
|
||||
this.version = null;
|
||||
this.warnings = [];
|
||||
}
|
||||
|
||||
add(value) {
|
||||
assertCollection(this.contents);
|
||||
return this.contents.add(value);
|
||||
}
|
||||
|
||||
addIn(path, value) {
|
||||
assertCollection(this.contents);
|
||||
this.contents.addIn(path, value);
|
||||
}
|
||||
|
||||
delete(key) {
|
||||
assertCollection(this.contents);
|
||||
return this.contents.delete(key);
|
||||
}
|
||||
|
||||
deleteIn(path) {
|
||||
if (resolveSeq.isEmptyPath(path)) {
|
||||
if (this.contents == null) return false;
|
||||
this.contents = null;
|
||||
return true;
|
||||
}
|
||||
|
||||
assertCollection(this.contents);
|
||||
return this.contents.deleteIn(path);
|
||||
}
|
||||
|
||||
getDefaults() {
|
||||
return Document.defaults[this.version] || Document.defaults[this.options.version] || {};
|
||||
}
|
||||
|
||||
get(key, keepScalar) {
|
||||
return this.contents instanceof resolveSeq.Collection ? this.contents.get(key, keepScalar) : undefined;
|
||||
}
|
||||
|
||||
getIn(path, keepScalar) {
|
||||
if (resolveSeq.isEmptyPath(path)) return !keepScalar && this.contents instanceof resolveSeq.Scalar ? this.contents.value : this.contents;
|
||||
return this.contents instanceof resolveSeq.Collection ? this.contents.getIn(path, keepScalar) : undefined;
|
||||
}
|
||||
|
||||
has(key) {
|
||||
return this.contents instanceof resolveSeq.Collection ? this.contents.has(key) : false;
|
||||
}
|
||||
|
||||
hasIn(path) {
|
||||
if (resolveSeq.isEmptyPath(path)) return this.contents !== undefined;
|
||||
return this.contents instanceof resolveSeq.Collection ? this.contents.hasIn(path) : false;
|
||||
}
|
||||
|
||||
set(key, value) {
|
||||
assertCollection(this.contents);
|
||||
this.contents.set(key, value);
|
||||
}
|
||||
|
||||
setIn(path, value) {
|
||||
if (resolveSeq.isEmptyPath(path)) this.contents = value;else {
|
||||
assertCollection(this.contents);
|
||||
this.contents.setIn(path, value);
|
||||
}
|
||||
}
|
||||
|
||||
setSchema(id, customTags) {
|
||||
if (!id && !customTags && this.schema) return;
|
||||
if (typeof id === 'number') id = id.toFixed(1);
|
||||
|
||||
if (id === '1.0' || id === '1.1' || id === '1.2') {
|
||||
if (this.version) this.version = id;else this.options.version = id;
|
||||
delete this.options.schema;
|
||||
} else if (id && typeof id === 'string') {
|
||||
this.options.schema = id;
|
||||
}
|
||||
|
||||
if (Array.isArray(customTags)) this.options.customTags = customTags;
|
||||
const opt = Object.assign({}, this.getDefaults(), this.options);
|
||||
this.schema = new Schema.Schema(opt);
|
||||
}
|
||||
|
||||
parse(node, prevDoc) {
|
||||
if (this.options.keepCstNodes) this.cstNode = node;
|
||||
if (this.options.keepNodeTypes) this.type = 'DOCUMENT';
|
||||
const {
|
||||
directives = [],
|
||||
contents = [],
|
||||
directivesEndMarker,
|
||||
error,
|
||||
valueRange
|
||||
} = node;
|
||||
|
||||
if (error) {
|
||||
if (!error.source) error.source = this;
|
||||
this.errors.push(error);
|
||||
}
|
||||
|
||||
parseDirectives(this, directives, prevDoc);
|
||||
if (directivesEndMarker) this.directivesEndMarker = true;
|
||||
this.range = valueRange ? [valueRange.start, valueRange.end] : null;
|
||||
this.setSchema();
|
||||
this.anchors._cstAliases = [];
|
||||
parseContents(this, contents);
|
||||
this.anchors.resolveNodes();
|
||||
|
||||
if (this.options.prettyErrors) {
|
||||
for (const error of this.errors) if (error instanceof PlainValue.YAMLError) error.makePretty();
|
||||
|
||||
for (const warn of this.warnings) if (warn instanceof PlainValue.YAMLError) warn.makePretty();
|
||||
}
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
listNonDefaultTags() {
|
||||
return listTagNames(this.contents).filter(t => t.indexOf(Schema.Schema.defaultPrefix) !== 0);
|
||||
}
|
||||
|
||||
setTagPrefix(handle, prefix) {
|
||||
if (handle[0] !== '!' || handle[handle.length - 1] !== '!') throw new Error('Handle must start and end with !');
|
||||
|
||||
if (prefix) {
|
||||
const prev = this.tagPrefixes.find(p => p.handle === handle);
|
||||
if (prev) prev.prefix = prefix;else this.tagPrefixes.push({
|
||||
handle,
|
||||
prefix
|
||||
});
|
||||
} else {
|
||||
this.tagPrefixes = this.tagPrefixes.filter(p => p.handle !== handle);
|
||||
}
|
||||
}
|
||||
|
||||
toJSON(arg, onAnchor) {
|
||||
const {
|
||||
keepBlobsInJSON,
|
||||
mapAsMap,
|
||||
maxAliasCount
|
||||
} = this.options;
|
||||
const keep = keepBlobsInJSON && (typeof arg !== 'string' || !(this.contents instanceof resolveSeq.Scalar));
|
||||
const ctx = {
|
||||
doc: this,
|
||||
indentStep: ' ',
|
||||
keep,
|
||||
mapAsMap: keep && !!mapAsMap,
|
||||
maxAliasCount,
|
||||
stringify // Requiring directly in Pair would create circular dependencies
|
||||
|
||||
};
|
||||
const anchorNames = Object.keys(this.anchors.map);
|
||||
if (anchorNames.length > 0) ctx.anchors = new Map(anchorNames.map(name => [this.anchors.map[name], {
|
||||
alias: [],
|
||||
aliasCount: 0,
|
||||
count: 1
|
||||
}]));
|
||||
const res = resolveSeq.toJSON(this.contents, arg, ctx);
|
||||
if (typeof onAnchor === 'function' && ctx.anchors) for (const {
|
||||
count,
|
||||
res
|
||||
} of ctx.anchors.values()) onAnchor(res, count);
|
||||
return res;
|
||||
}
|
||||
|
||||
toString() {
|
||||
if (this.errors.length > 0) throw new Error('Document with errors cannot be stringified');
|
||||
const indentSize = this.options.indent;
|
||||
|
||||
if (!Number.isInteger(indentSize) || indentSize <= 0) {
|
||||
const s = JSON.stringify(indentSize);
|
||||
throw new Error(`"indent" option must be a positive integer, not ${s}`);
|
||||
}
|
||||
|
||||
this.setSchema();
|
||||
const lines = [];
|
||||
let hasDirectives = false;
|
||||
|
||||
if (this.version) {
|
||||
let vd = '%YAML 1.2';
|
||||
|
||||
if (this.schema.name === 'yaml-1.1') {
|
||||
if (this.version === '1.0') vd = '%YAML:1.0';else if (this.version === '1.1') vd = '%YAML 1.1';
|
||||
}
|
||||
|
||||
lines.push(vd);
|
||||
hasDirectives = true;
|
||||
}
|
||||
|
||||
const tagNames = this.listNonDefaultTags();
|
||||
this.tagPrefixes.forEach(({
|
||||
handle,
|
||||
prefix
|
||||
}) => {
|
||||
if (tagNames.some(t => t.indexOf(prefix) === 0)) {
|
||||
lines.push(`%TAG ${handle} ${prefix}`);
|
||||
hasDirectives = true;
|
||||
}
|
||||
});
|
||||
if (hasDirectives || this.directivesEndMarker) lines.push('---');
|
||||
|
||||
if (this.commentBefore) {
|
||||
if (hasDirectives || !this.directivesEndMarker) lines.unshift('');
|
||||
lines.unshift(this.commentBefore.replace(/^/gm, '#'));
|
||||
}
|
||||
|
||||
const ctx = {
|
||||
anchors: {},
|
||||
doc: this,
|
||||
indent: '',
|
||||
indentStep: ' '.repeat(indentSize),
|
||||
stringify // Requiring directly in nodes would create circular dependencies
|
||||
|
||||
};
|
||||
let chompKeep = false;
|
||||
let contentComment = null;
|
||||
|
||||
if (this.contents) {
|
||||
if (this.contents instanceof resolveSeq.Node) {
|
||||
if (this.contents.spaceBefore && (hasDirectives || this.directivesEndMarker)) lines.push('');
|
||||
if (this.contents.commentBefore) lines.push(this.contents.commentBefore.replace(/^/gm, '#')); // top-level block scalars need to be indented if followed by a comment
|
||||
|
||||
ctx.forceBlockIndent = !!this.comment;
|
||||
contentComment = this.contents.comment;
|
||||
}
|
||||
|
||||
const onChompKeep = contentComment ? null : () => chompKeep = true;
|
||||
const body = stringify(this.contents, ctx, () => contentComment = null, onChompKeep);
|
||||
lines.push(resolveSeq.addComment(body, '', contentComment));
|
||||
} else if (this.contents !== undefined) {
|
||||
lines.push(stringify(this.contents, ctx));
|
||||
}
|
||||
|
||||
if (this.comment) {
|
||||
if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '') lines.push('');
|
||||
lines.push(this.comment.replace(/^/gm, '#'));
|
||||
}
|
||||
|
||||
return lines.join('\n') + '\n';
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
PlainValue._defineProperty(Document, "defaults", documentOptions);

exports.Document = Document;
exports.defaultOptions = defaultOptions;
exports.scalarOptions = scalarOptions;
876 node_modules/yaml/dist/PlainValue-ec8e588e.js generated vendored Normal file
@@ -0,0 +1,876 @@
'use strict';

const Char = {
  ANCHOR: '&',
  COMMENT: '#',
  TAG: '!',
  DIRECTIVES_END: '-',
  DOCUMENT_END: '.'
};
const Type = {
  ALIAS: 'ALIAS',
  BLANK_LINE: 'BLANK_LINE',
  BLOCK_FOLDED: 'BLOCK_FOLDED',
  BLOCK_LITERAL: 'BLOCK_LITERAL',
  COMMENT: 'COMMENT',
  DIRECTIVE: 'DIRECTIVE',
  DOCUMENT: 'DOCUMENT',
  FLOW_MAP: 'FLOW_MAP',
  FLOW_SEQ: 'FLOW_SEQ',
  MAP: 'MAP',
  MAP_KEY: 'MAP_KEY',
  MAP_VALUE: 'MAP_VALUE',
  PLAIN: 'PLAIN',
  QUOTE_DOUBLE: 'QUOTE_DOUBLE',
  QUOTE_SINGLE: 'QUOTE_SINGLE',
  SEQ: 'SEQ',
  SEQ_ITEM: 'SEQ_ITEM'
};
const defaultTagPrefix = 'tag:yaml.org,2002:';
const defaultTags = {
  MAP: 'tag:yaml.org,2002:map',
  SEQ: 'tag:yaml.org,2002:seq',
  STR: 'tag:yaml.org,2002:str'
};
function findLineStarts(src) {
|
||||
const ls = [0];
|
||||
let offset = src.indexOf('\n');
|
||||
|
||||
while (offset !== -1) {
|
||||
offset += 1;
|
||||
ls.push(offset);
|
||||
offset = src.indexOf('\n', offset);
|
||||
}
|
||||
|
||||
return ls;
|
||||
}
|
||||
|
||||
function getSrcInfo(cst) {
|
||||
let lineStarts, src;
|
||||
|
||||
if (typeof cst === 'string') {
|
||||
lineStarts = findLineStarts(cst);
|
||||
src = cst;
|
||||
} else {
|
||||
if (Array.isArray(cst)) cst = cst[0];
|
||||
|
||||
if (cst && cst.context) {
|
||||
if (!cst.lineStarts) cst.lineStarts = findLineStarts(cst.context.src);
|
||||
lineStarts = cst.lineStarts;
|
||||
src = cst.context.src;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
lineStarts,
|
||||
src
|
||||
};
|
||||
}
|
||||
/**
|
||||
* @typedef {Object} LinePos - One-indexed position in the source
|
||||
* @property {number} line
|
||||
* @property {number} col
|
||||
*/
|
||||
|
||||
/**
|
||||
* Determine the line/col position matching a character offset.
|
||||
*
|
||||
* Accepts a source string or a CST document as the second parameter. With
|
||||
* the latter, starting indices for lines are cached in the document as
|
||||
* `lineStarts: number[]`.
|
||||
*
|
||||
* Returns a one-indexed `{ line, col }` location if found, or
|
||||
* `undefined` otherwise.
|
||||
*
|
||||
* @param {number} offset
|
||||
* @param {string|Document|Document[]} cst
|
||||
* @returns {?LinePos}
|
||||
*/
|
||||
|
||||
|
||||
function getLinePos(offset, cst) {
|
||||
if (typeof offset !== 'number' || offset < 0) return null;
|
||||
const {
|
||||
lineStarts,
|
||||
src
|
||||
} = getSrcInfo(cst);
|
||||
if (!lineStarts || !src || offset > src.length) return null;
|
||||
|
||||
for (let i = 0; i < lineStarts.length; ++i) {
|
||||
const start = lineStarts[i];
|
||||
|
||||
if (offset < start) {
|
||||
return {
|
||||
line: i,
|
||||
col: offset - lineStarts[i - 1] + 1
|
||||
};
|
||||
}
|
||||
|
||||
if (offset === start) return {
|
||||
line: i + 1,
|
||||
col: 1
|
||||
};
|
||||
}
|
||||
|
||||
const line = lineStarts.length;
|
||||
return {
|
||||
line,
|
||||
col: offset - lineStarts[line - 1] + 1
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Get a specified line from the source.
|
||||
*
|
||||
* Accepts a source string or a CST document as the second parameter. With
|
||||
* the latter, starting indices for lines are cached in the document as
|
||||
* `lineStarts: number[]`.
|
||||
*
|
||||
* Returns the line as a string if found, or `null` otherwise.
|
||||
*
|
||||
* @param {number} line One-indexed line number
|
||||
* @param {string|Document|Document[]} cst
|
||||
* @returns {?string}
|
||||
*/
|
||||
|
||||
function getLine(line, cst) {
|
||||
const {
|
||||
lineStarts,
|
||||
src
|
||||
} = getSrcInfo(cst);
|
||||
if (!lineStarts || !(line >= 1) || line > lineStarts.length) return null;
|
||||
const start = lineStarts[line - 1];
|
||||
let end = lineStarts[line]; // undefined for last line; that's ok for slice()
|
||||
|
||||
while (end && end > start && src[end - 1] === '\n') --end;
|
||||
|
||||
return src.slice(start, end);
|
||||
}
|
||||
/**
|
||||
* Pretty-print the starting line from the source indicated by the range `pos`
|
||||
*
|
||||
* Trims output to `maxWidth` chars while keeping the starting column visible,
|
||||
* using `…` at either end to indicate dropped characters.
|
||||
*
|
||||
* Returns a two-line string (or `null`) with `\n` as separator; the second line
|
||||
* will hold appropriately indented `^` marks indicating the column range.
|
||||
*
|
||||
* @param {Object} pos
|
||||
* @param {LinePos} pos.start
|
||||
* @param {LinePos} [pos.end]
|
||||
* @param {string|Document|Document[]*} cst
|
||||
* @param {number} [maxWidth=80]
|
||||
* @returns {?string}
|
||||
*/
|
||||
|
||||
function getPrettyContext({
|
||||
start,
|
||||
end
|
||||
}, cst, maxWidth = 80) {
|
||||
let src = getLine(start.line, cst);
|
||||
if (!src) return null;
|
||||
let {
|
||||
col
|
||||
} = start;
|
||||
|
||||
if (src.length > maxWidth) {
|
||||
if (col <= maxWidth - 10) {
|
||||
src = src.substr(0, maxWidth - 1) + '…';
|
||||
} else {
|
||||
const halfWidth = Math.round(maxWidth / 2);
|
||||
if (src.length > col + halfWidth) src = src.substr(0, col + halfWidth - 1) + '…';
|
||||
col -= src.length - maxWidth;
|
||||
src = '…' + src.substr(1 - maxWidth);
|
||||
}
|
||||
}
|
||||
|
||||
let errLen = 1;
|
||||
let errEnd = '';
|
||||
|
||||
if (end) {
|
||||
if (end.line === start.line && col + (end.col - start.col) <= maxWidth + 1) {
|
||||
errLen = end.col - start.col;
|
||||
} else {
|
||||
errLen = Math.min(src.length + 1, maxWidth) - col;
|
||||
errEnd = '…';
|
||||
}
|
||||
}
|
||||
|
||||
const offset = col > 1 ? ' '.repeat(col - 1) : '';
|
||||
const err = '^'.repeat(errLen);
|
||||
return `${src}\n${offset}${err}${errEnd}`;
|
||||
}
|
||||
|
||||
class Range {
|
||||
static copy(orig) {
|
||||
return new Range(orig.start, orig.end);
|
||||
}
|
||||
|
||||
constructor(start, end) {
|
||||
this.start = start;
|
||||
this.end = end || start;
|
||||
}
|
||||
|
||||
isEmpty() {
|
||||
return typeof this.start !== 'number' || !this.end || this.end <= this.start;
|
||||
}
|
||||
/**
|
||||
* Set `origStart` and `origEnd` to point to the original source range for
|
||||
* this node, which may differ due to dropped CR characters.
|
||||
*
|
||||
* @param {number[]} cr - Positions of dropped CR characters
|
||||
* @param {number} offset - Starting index of `cr` from the last call
|
||||
* @returns {number} - The next offset, matching the one found for `origStart`
|
||||
*/
|
||||
|
||||
|
||||
setOrigRange(cr, offset) {
|
||||
const {
|
||||
start,
|
||||
end
|
||||
} = this;
|
||||
|
||||
if (cr.length === 0 || end <= cr[0]) {
|
||||
this.origStart = start;
|
||||
this.origEnd = end;
|
||||
return offset;
|
||||
}
|
||||
|
||||
let i = offset;
|
||||
|
||||
while (i < cr.length) {
|
||||
if (cr[i] > start) break;else ++i;
|
||||
}
|
||||
|
||||
this.origStart = start + i;
|
||||
const nextOffset = i;
|
||||
|
||||
while (i < cr.length) {
|
||||
// if end was at \n, it should now be at \r
|
||||
if (cr[i] >= end) break;else ++i;
|
||||
}
|
||||
|
||||
this.origEnd = end + i;
|
||||
return nextOffset;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/** Root class of all nodes */
|
||||
|
||||
class Node {
|
||||
static addStringTerminator(src, offset, str) {
|
||||
if (str[str.length - 1] === '\n') return str;
|
||||
const next = Node.endOfWhiteSpace(src, offset);
|
||||
return next >= src.length || src[next] === '\n' ? str + '\n' : str;
|
||||
} // ^(---|...)
|
||||
|
||||
|
||||
static atDocumentBoundary(src, offset, sep) {
|
||||
const ch0 = src[offset];
|
||||
if (!ch0) return true;
|
||||
const prev = src[offset - 1];
|
||||
if (prev && prev !== '\n') return false;
|
||||
|
||||
if (sep) {
|
||||
if (ch0 !== sep) return false;
|
||||
} else {
|
||||
if (ch0 !== Char.DIRECTIVES_END && ch0 !== Char.DOCUMENT_END) return false;
|
||||
}
|
||||
|
||||
const ch1 = src[offset + 1];
|
||||
const ch2 = src[offset + 2];
|
||||
if (ch1 !== ch0 || ch2 !== ch0) return false;
|
||||
const ch3 = src[offset + 3];
|
||||
return !ch3 || ch3 === '\n' || ch3 === '\t' || ch3 === ' ';
|
||||
}
|
||||
|
||||
static endOfIdentifier(src, offset) {
|
||||
let ch = src[offset];
|
||||
const isVerbatim = ch === '<';
|
||||
const notOk = isVerbatim ? ['\n', '\t', ' ', '>'] : ['\n', '\t', ' ', '[', ']', '{', '}', ','];
|
||||
|
||||
while (ch && notOk.indexOf(ch) === -1) ch = src[offset += 1];
|
||||
|
||||
if (isVerbatim && ch === '>') offset += 1;
|
||||
return offset;
|
||||
}
|
||||
|
||||
static endOfIndent(src, offset) {
|
||||
let ch = src[offset];
|
||||
|
||||
while (ch === ' ') ch = src[offset += 1];
|
||||
|
||||
return offset;
|
||||
}
|
||||
|
||||
static endOfLine(src, offset) {
|
||||
let ch = src[offset];
|
||||
|
||||
while (ch && ch !== '\n') ch = src[offset += 1];
|
||||
|
||||
return offset;
|
||||
}
|
||||
|
||||
static endOfWhiteSpace(src, offset) {
|
||||
let ch = src[offset];
|
||||
|
||||
while (ch === '\t' || ch === ' ') ch = src[offset += 1];
|
||||
|
||||
return offset;
|
||||
}
|
||||
|
||||
static startOfLine(src, offset) {
|
||||
let ch = src[offset - 1];
|
||||
if (ch === '\n') return offset;
|
||||
|
||||
while (ch && ch !== '\n') ch = src[offset -= 1];
|
||||
|
||||
return offset + 1;
|
||||
}
|
||||
/**
|
||||
* End of indentation, or null if the line's indent level is not more
|
||||
* than `indent`
|
||||
*
|
||||
* @param {string} src
|
||||
* @param {number} indent
|
||||
* @param {number} lineStart
|
||||
* @returns {?number}
|
||||
*/
|
||||
|
||||
|
||||
static endOfBlockIndent(src, indent, lineStart) {
|
||||
const inEnd = Node.endOfIndent(src, lineStart);
|
||||
|
||||
if (inEnd > lineStart + indent) {
|
||||
return inEnd;
|
||||
} else {
|
||||
const wsEnd = Node.endOfWhiteSpace(src, inEnd);
|
||||
const ch = src[wsEnd];
|
||||
if (!ch || ch === '\n') return wsEnd;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
static atBlank(src, offset, endAsBlank) {
|
||||
const ch = src[offset];
|
||||
return ch === '\n' || ch === '\t' || ch === ' ' || endAsBlank && !ch;
|
||||
}
|
||||
|
||||
static nextNodeIsIndented(ch, indentDiff, indicatorAsIndent) {
|
||||
if (!ch || indentDiff < 0) return false;
|
||||
if (indentDiff > 0) return true;
|
||||
return indicatorAsIndent && ch === '-';
|
||||
} // should be at line or string end, or at next non-whitespace char
|
||||
|
||||
|
||||
static normalizeOffset(src, offset) {
|
||||
const ch = src[offset];
|
||||
return !ch ? offset : ch !== '\n' && src[offset - 1] === '\n' ? offset - 1 : Node.endOfWhiteSpace(src, offset);
|
||||
} // fold single newline into space, multiple newlines to N - 1 newlines
|
||||
// presumes src[offset] === '\n'
|
||||
|
||||
|
||||
static foldNewline(src, offset, indent) {
|
||||
let inCount = 0;
|
||||
let error = false;
|
||||
let fold = '';
|
||||
let ch = src[offset + 1];
|
||||
|
||||
while (ch === ' ' || ch === '\t' || ch === '\n') {
|
||||
switch (ch) {
|
||||
case '\n':
|
||||
inCount = 0;
|
||||
offset += 1;
|
||||
fold += '\n';
|
||||
break;
|
||||
|
||||
case '\t':
|
||||
if (inCount <= indent) error = true;
|
||||
offset = Node.endOfWhiteSpace(src, offset + 2) - 1;
|
||||
break;
|
||||
|
||||
case ' ':
|
||||
inCount += 1;
|
||||
offset += 1;
|
||||
break;
|
||||
}
|
||||
|
||||
ch = src[offset + 1];
|
||||
}
|
||||
|
||||
if (!fold) fold = ' ';
|
||||
if (ch && inCount <= indent) error = true;
|
||||
return {
|
||||
fold,
|
||||
offset,
|
||||
error
|
||||
};
|
||||
}
|
||||
|
||||
constructor(type, props, context) {
|
||||
Object.defineProperty(this, 'context', {
|
||||
value: context || null,
|
||||
writable: true
|
||||
});
|
||||
this.error = null;
|
||||
this.range = null;
|
||||
this.valueRange = null;
|
||||
this.props = props || [];
|
||||
this.type = type;
|
||||
this.value = null;
|
||||
}
|
||||
|
||||
getPropValue(idx, key, skipKey) {
|
||||
if (!this.context) return null;
|
||||
const {
|
||||
src
|
||||
} = this.context;
|
||||
const prop = this.props[idx];
|
||||
return prop && src[prop.start] === key ? src.slice(prop.start + (skipKey ? 1 : 0), prop.end) : null;
|
||||
}
|
||||
|
||||
get anchor() {
|
||||
for (let i = 0; i < this.props.length; ++i) {
|
||||
const anchor = this.getPropValue(i, Char.ANCHOR, true);
|
||||
if (anchor != null) return anchor;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
get comment() {
|
||||
const comments = [];
|
||||
|
||||
for (let i = 0; i < this.props.length; ++i) {
|
||||
const comment = this.getPropValue(i, Char.COMMENT, true);
|
||||
if (comment != null) comments.push(comment);
|
||||
}
|
||||
|
||||
return comments.length > 0 ? comments.join('\n') : null;
|
||||
}
|
||||
|
||||
commentHasRequiredWhitespace(start) {
|
||||
const {
|
||||
src
|
||||
} = this.context;
|
||||
if (this.header && start === this.header.end) return false;
|
||||
if (!this.valueRange) return false;
|
||||
const {
|
||||
end
|
||||
} = this.valueRange;
|
||||
return start !== end || Node.atBlank(src, end - 1);
|
||||
}
|
||||
|
||||
get hasComment() {
|
||||
if (this.context) {
|
||||
const {
|
||||
src
|
||||
} = this.context;
|
||||
|
||||
for (let i = 0; i < this.props.length; ++i) {
|
||||
if (src[this.props[i].start] === Char.COMMENT) return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
get hasProps() {
|
||||
if (this.context) {
|
||||
const {
|
||||
src
|
||||
} = this.context;
|
||||
|
||||
for (let i = 0; i < this.props.length; ++i) {
|
||||
if (src[this.props[i].start] !== Char.COMMENT) return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
get includesTrailingLines() {
|
||||
return false;
|
||||
}
|
||||
|
||||
get jsonLike() {
|
||||
const jsonLikeTypes = [Type.FLOW_MAP, Type.FLOW_SEQ, Type.QUOTE_DOUBLE, Type.QUOTE_SINGLE];
|
||||
return jsonLikeTypes.indexOf(this.type) !== -1;
|
||||
}
|
||||
|
||||
get rangeAsLinePos() {
|
||||
if (!this.range || !this.context) return undefined;
|
||||
const start = getLinePos(this.range.start, this.context.root);
|
||||
if (!start) return undefined;
|
||||
const end = getLinePos(this.range.end, this.context.root);
|
||||
return {
|
||||
start,
|
||||
end
|
||||
};
|
||||
}
|
||||
|
||||
get rawValue() {
|
||||
if (!this.valueRange || !this.context) return null;
|
||||
const {
|
||||
start,
|
||||
end
|
||||
} = this.valueRange;
|
||||
return this.context.src.slice(start, end);
|
||||
}
|
||||
|
||||
get tag() {
|
||||
for (let i = 0; i < this.props.length; ++i) {
|
||||
const tag = this.getPropValue(i, Char.TAG, false);
|
||||
|
||||
if (tag != null) {
|
||||
if (tag[1] === '<') {
|
||||
return {
|
||||
verbatim: tag.slice(2, -1)
|
||||
};
|
||||
} else {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const [_, handle, suffix] = tag.match(/^(.*!)([^!]*)$/);
|
||||
return {
|
||||
handle,
|
||||
suffix
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
get valueRangeContainsNewline() {
|
||||
if (!this.valueRange || !this.context) return false;
|
||||
const {
|
||||
start,
|
||||
end
|
||||
} = this.valueRange;
|
||||
const {
|
||||
src
|
||||
} = this.context;
|
||||
|
||||
for (let i = start; i < end; ++i) {
|
||||
if (src[i] === '\n') return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
parseComment(start) {
|
||||
const {
|
||||
src
|
||||
} = this.context;
|
||||
|
||||
if (src[start] === Char.COMMENT) {
|
||||
const end = Node.endOfLine(src, start + 1);
|
||||
const commentRange = new Range(start, end);
|
||||
this.props.push(commentRange);
|
||||
return end;
|
||||
}
|
||||
|
||||
return start;
|
||||
}
|
||||
/**
|
||||
* Populates the `origStart` and `origEnd` values of all ranges for this
|
||||
* node. Extended by child classes to handle descendant nodes.
|
||||
*
|
||||
* @param {number[]} cr - Positions of dropped CR characters
|
||||
* @param {number} offset - Starting index of `cr` from the last call
|
||||
* @returns {number} - The next offset, matching the one found for `origStart`
|
||||
*/
|
||||
|
||||
|
||||
setOrigRanges(cr, offset) {
|
||||
if (this.range) offset = this.range.setOrigRange(cr, offset);
|
||||
if (this.valueRange) this.valueRange.setOrigRange(cr, offset);
|
||||
this.props.forEach(prop => prop.setOrigRange(cr, offset));
|
||||
return offset;
|
||||
}
|
||||
|
||||
toString() {
|
||||
const {
|
||||
context: {
|
||||
src
|
||||
},
|
||||
range,
|
||||
value
|
||||
} = this;
|
||||
if (value != null) return value;
|
||||
const str = src.slice(range.start, range.end);
|
||||
return Node.addStringTerminator(src, range.end, str);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
class YAMLError extends Error {
|
||||
constructor(name, source, message) {
|
||||
if (!message || !(source instanceof Node)) throw new Error(`Invalid arguments for new ${name}`);
|
||||
super();
|
||||
this.name = name;
|
||||
this.message = message;
|
||||
this.source = source;
|
||||
}
|
||||
|
||||
makePretty() {
|
||||
if (!this.source) return;
|
||||
this.nodeType = this.source.type;
|
||||
const cst = this.source.context && this.source.context.root;
|
||||
|
||||
if (typeof this.offset === 'number') {
|
||||
this.range = new Range(this.offset, this.offset + 1);
|
||||
const start = cst && getLinePos(this.offset, cst);
|
||||
|
||||
if (start) {
|
||||
const end = {
|
||||
line: start.line,
|
||||
col: start.col + 1
|
||||
};
|
||||
this.linePos = {
|
||||
start,
|
||||
end
|
||||
};
|
||||
}
|
||||
|
||||
delete this.offset;
|
||||
} else {
|
||||
this.range = this.source.range;
|
||||
this.linePos = this.source.rangeAsLinePos;
|
||||
}
|
||||
|
||||
if (this.linePos) {
|
||||
const {
|
||||
line,
|
||||
col
|
||||
} = this.linePos.start;
|
||||
this.message += ` at line ${line}, column ${col}`;
|
||||
const ctx = cst && getPrettyContext(this.linePos, cst);
|
||||
if (ctx) this.message += `:\n\n${ctx}\n`;
|
||||
}
|
||||
|
||||
delete this.source;
|
||||
}
|
||||
|
||||
}
|
||||
class YAMLReferenceError extends YAMLError {
|
||||
constructor(source, message) {
|
||||
super('YAMLReferenceError', source, message);
|
||||
}
|
||||
|
||||
}
|
||||
class YAMLSemanticError extends YAMLError {
|
||||
constructor(source, message) {
|
||||
super('YAMLSemanticError', source, message);
|
||||
}
|
||||
|
||||
}
|
||||
class YAMLSyntaxError extends YAMLError {
|
||||
constructor(source, message) {
|
||||
super('YAMLSyntaxError', source, message);
|
||||
}
|
||||
|
||||
}
|
||||
class YAMLWarning extends YAMLError {
|
||||
constructor(source, message) {
|
||||
super('YAMLWarning', source, message);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
function _defineProperty(obj, key, value) {
|
||||
if (key in obj) {
|
||||
Object.defineProperty(obj, key, {
|
||||
value: value,
|
||||
enumerable: true,
|
||||
configurable: true,
|
||||
writable: true
|
||||
});
|
||||
} else {
|
||||
obj[key] = value;
|
||||
}
|
||||
|
||||
return obj;
|
||||
}
|
||||
|
||||
class PlainValue extends Node {
|
||||
static endOfLine(src, start, inFlow) {
|
||||
let ch = src[start];
|
||||
let offset = start;
|
||||
|
||||
while (ch && ch !== '\n') {
|
||||
if (inFlow && (ch === '[' || ch === ']' || ch === '{' || ch === '}' || ch === ',')) break;
|
||||
const next = src[offset + 1];
|
||||
if (ch === ':' && (!next || next === '\n' || next === '\t' || next === ' ' || inFlow && next === ',')) break;
|
||||
if ((ch === ' ' || ch === '\t') && next === '#') break;
|
||||
offset += 1;
|
||||
ch = next;
|
||||
}
|
||||
|
||||
return offset;
|
||||
}
|
||||
|
||||
get strValue() {
|
||||
if (!this.valueRange || !this.context) return null;
|
||||
let {
|
||||
start,
|
||||
end
|
||||
} = this.valueRange;
|
||||
const {
|
||||
src
|
||||
} = this.context;
|
||||
let ch = src[end - 1];
|
||||
|
||||
while (start < end && (ch === '\n' || ch === '\t' || ch === ' ')) ch = src[--end - 1];
|
||||
|
||||
let str = '';
|
||||
|
||||
for (let i = start; i < end; ++i) {
|
||||
const ch = src[i];
|
||||
|
||||
if (ch === '\n') {
|
||||
const {
|
||||
fold,
|
||||
offset
|
||||
} = Node.foldNewline(src, i, -1);
|
||||
str += fold;
|
||||
i = offset;
|
||||
} else if (ch === ' ' || ch === '\t') {
|
||||
// trim trailing whitespace
|
||||
const wsStart = i;
|
||||
let next = src[i + 1];
|
||||
|
||||
while (i < end && (next === ' ' || next === '\t')) {
|
||||
i += 1;
|
||||
next = src[i + 1];
|
||||
}
|
||||
|
||||
if (next !== '\n') str += i > wsStart ? src.slice(wsStart, i + 1) : ch;
|
||||
} else {
|
||||
str += ch;
|
||||
}
|
||||
}
|
||||
|
||||
const ch0 = src[start];
|
||||
|
||||
switch (ch0) {
|
||||
case '\t':
|
||||
{
|
||||
const msg = 'Plain value cannot start with a tab character';
|
||||
const errors = [new YAMLSemanticError(this, msg)];
|
||||
return {
|
||||
errors,
|
||||
str
|
||||
};
|
||||
}
|
||||
|
||||
case '@':
|
||||
case '`':
|
||||
{
|
||||
const msg = `Plain value cannot start with reserved character ${ch0}`;
|
||||
const errors = [new YAMLSemanticError(this, msg)];
|
||||
return {
|
||||
errors,
|
||||
str
|
||||
};
|
||||
}
|
||||
|
||||
default:
|
||||
return str;
|
||||
}
|
||||
}
|
||||
|
||||
parseBlockValue(start) {
|
||||
const {
|
||||
indent,
|
||||
inFlow,
|
||||
src
|
||||
} = this.context;
|
||||
let offset = start;
|
||||
let valueEnd = start;
|
||||
|
||||
for (let ch = src[offset]; ch === '\n'; ch = src[offset]) {
|
||||
if (Node.atDocumentBoundary(src, offset + 1)) break;
|
||||
const end = Node.endOfBlockIndent(src, indent, offset + 1);
|
||||
if (end === null || src[end] === '#') break;
|
||||
|
||||
if (src[end] === '\n') {
|
||||
offset = end;
|
||||
} else {
|
||||
valueEnd = PlainValue.endOfLine(src, end, inFlow);
|
||||
offset = valueEnd;
|
||||
}
|
||||
}
|
||||
|
||||
if (this.valueRange.isEmpty()) this.valueRange.start = start;
|
||||
this.valueRange.end = valueEnd;
|
||||
return valueEnd;
|
||||
}
|
||||
/**
|
||||
* Parses a plain value from the source
|
||||
*
|
||||
* Accepted forms are:
|
||||
* ```
|
||||
* #comment
|
||||
*
|
||||
* first line
|
||||
*
|
||||
* first line #comment
|
||||
*
|
||||
* first line
|
||||
* block
|
||||
* lines
|
||||
*
|
||||
* #comment
|
||||
* block
|
||||
* lines
|
||||
* ```
|
||||
* where block lines are empty or have an indent level greater than `indent`.
|
||||
*
|
||||
* @param {ParseContext} context
|
||||
* @param {number} start - Index of first character
|
||||
* @returns {number} - Index of the character after this scalar, may be `\n`
|
||||
*/
|
||||
|
||||
|
||||
parse(context, start) {
|
||||
this.context = context;
|
||||
const {
|
||||
inFlow,
|
||||
src
|
||||
} = context;
|
||||
let offset = start;
|
||||
const ch = src[offset];
|
||||
|
||||
if (ch && ch !== '#' && ch !== '\n') {
|
||||
offset = PlainValue.endOfLine(src, start, inFlow);
|
||||
}
|
||||
|
||||
this.valueRange = new Range(start, offset);
|
||||
offset = Node.endOfWhiteSpace(src, offset);
|
||||
offset = this.parseComment(offset);
|
||||
|
||||
if (!this.hasComment || this.valueRange.isEmpty()) {
|
||||
offset = this.parseBlockValue(offset);
|
||||
}
|
||||
|
||||
return offset;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
exports.Char = Char;
exports.Node = Node;
exports.PlainValue = PlainValue;
exports.Range = Range;
exports.Type = Type;
exports.YAMLError = YAMLError;
exports.YAMLReferenceError = YAMLReferenceError;
exports.YAMLSemanticError = YAMLSemanticError;
exports.YAMLSyntaxError = YAMLSyntaxError;
exports.YAMLWarning = YAMLWarning;
exports._defineProperty = _defineProperty;
exports.defaultTagPrefix = defaultTagPrefix;
exports.defaultTags = defaultTags;
522 node_modules/yaml/dist/Schema-42e9705c.js generated vendored Normal file
@@ -0,0 +1,522 @@
'use strict';

var PlainValue = require('./PlainValue-ec8e588e.js');
var resolveSeq = require('./resolveSeq-4a68b39b.js');
var warnings = require('./warnings-39684f17.js');

function createMap(schema, obj, ctx) {
  const map = new resolveSeq.YAMLMap(schema);

  if (obj instanceof Map) {
    for (const [key, value] of obj) map.items.push(schema.createPair(key, value, ctx));
  } else if (obj && typeof obj === 'object') {
    for (const key of Object.keys(obj)) map.items.push(schema.createPair(key, obj[key], ctx));
  }

  if (typeof schema.sortMapEntries === 'function') {
    map.items.sort(schema.sortMapEntries);
  }

  return map;
}
const map = {
|
||||
createNode: createMap,
|
||||
default: true,
|
||||
nodeClass: resolveSeq.YAMLMap,
|
||||
tag: 'tag:yaml.org,2002:map',
|
||||
resolve: resolveSeq.resolveMap
|
||||
};
|
||||
|
||||
function createSeq(schema, obj, ctx) {
|
||||
const seq = new resolveSeq.YAMLSeq(schema);
|
||||
|
||||
if (obj && obj[Symbol.iterator]) {
|
||||
for (const it of obj) {
|
||||
const v = schema.createNode(it, ctx.wrapScalars, null, ctx);
|
||||
seq.items.push(v);
|
||||
}
|
||||
}
|
||||
|
||||
return seq;
|
||||
}
|
||||
|
||||
const seq = {
|
||||
createNode: createSeq,
|
||||
default: true,
|
||||
nodeClass: resolveSeq.YAMLSeq,
|
||||
tag: 'tag:yaml.org,2002:seq',
|
||||
resolve: resolveSeq.resolveSeq
|
||||
};
|
||||
|
||||
const string = {
|
||||
identify: value => typeof value === 'string',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:str',
|
||||
resolve: resolveSeq.resolveString,
|
||||
|
||||
stringify(item, ctx, onComment, onChompKeep) {
|
||||
ctx = Object.assign({
|
||||
actualString: true
|
||||
}, ctx);
|
||||
return resolveSeq.stringifyString(item, ctx, onComment, onChompKeep);
|
||||
},
|
||||
|
||||
options: resolveSeq.strOptions
|
||||
};
|
||||
|
||||
const failsafe = [map, seq, string];
|
||||
|
||||
/* global BigInt */
|
||||
|
||||
const intIdentify = value => typeof value === 'bigint' || Number.isInteger(value);
|
||||
|
||||
const intResolve = (src, part, radix) => resolveSeq.intOptions.asBigInt ? BigInt(src) : parseInt(part, radix);
|
||||
|
||||
function intStringify(node, radix, prefix) {
|
||||
const {
|
||||
value
|
||||
} = node;
|
||||
if (intIdentify(value) && value >= 0) return prefix + value.toString(radix);
|
||||
return resolveSeq.stringifyNumber(node);
|
||||
}
|
||||
|
||||
const nullObj = {
|
||||
identify: value => value == null,
|
||||
createNode: (schema, value, ctx) => ctx.wrapScalars ? new resolveSeq.Scalar(null) : null,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:null',
|
||||
test: /^(?:~|[Nn]ull|NULL)?$/,
|
||||
resolve: () => null,
|
||||
options: resolveSeq.nullOptions,
|
||||
stringify: () => resolveSeq.nullOptions.nullStr
|
||||
};
|
||||
const boolObj = {
|
||||
identify: value => typeof value === 'boolean',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:bool',
|
||||
test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/,
|
||||
resolve: str => str[0] === 't' || str[0] === 'T',
|
||||
options: resolveSeq.boolOptions,
|
||||
stringify: ({
|
||||
value
|
||||
}) => value ? resolveSeq.boolOptions.trueStr : resolveSeq.boolOptions.falseStr
|
||||
};
|
||||
const octObj = {
|
||||
identify: value => intIdentify(value) && value >= 0,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
format: 'OCT',
|
||||
test: /^0o([0-7]+)$/,
|
||||
resolve: (str, oct) => intResolve(str, oct, 8),
|
||||
options: resolveSeq.intOptions,
|
||||
stringify: node => intStringify(node, 8, '0o')
|
||||
};
|
||||
const intObj = {
|
||||
identify: intIdentify,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
test: /^[-+]?[0-9]+$/,
|
||||
resolve: str => intResolve(str, str, 10),
|
||||
options: resolveSeq.intOptions,
|
||||
stringify: resolveSeq.stringifyNumber
|
||||
};
|
||||
const hexObj = {
|
||||
identify: value => intIdentify(value) && value >= 0,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
format: 'HEX',
|
||||
test: /^0x([0-9a-fA-F]+)$/,
|
||||
resolve: (str, hex) => intResolve(str, hex, 16),
|
||||
options: resolveSeq.intOptions,
|
||||
stringify: node => intStringify(node, 16, '0x')
|
||||
};
|
||||
const nanObj = {
|
||||
identify: value => typeof value === 'number',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:float',
|
||||
test: /^(?:[-+]?\.inf|(\.nan))$/i,
|
||||
resolve: (str, nan) => nan ? NaN : str[0] === '-' ? Number.NEGATIVE_INFINITY : Number.POSITIVE_INFINITY,
|
||||
stringify: resolveSeq.stringifyNumber
|
||||
};
|
||||
const expObj = {
|
||||
identify: value => typeof value === 'number',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:float',
|
||||
format: 'EXP',
|
||||
test: /^[-+]?(?:\.[0-9]+|[0-9]+(?:\.[0-9]*)?)[eE][-+]?[0-9]+$/,
|
||||
resolve: str => parseFloat(str),
|
||||
stringify: ({
|
||||
value
|
||||
}) => Number(value).toExponential()
|
||||
};
|
||||
const floatObj = {
|
||||
identify: value => typeof value === 'number',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:float',
|
||||
test: /^[-+]?(?:\.([0-9]+)|[0-9]+\.([0-9]*))$/,
|
||||
|
||||
resolve(str, frac1, frac2) {
|
||||
const frac = frac1 || frac2;
|
||||
const node = new resolveSeq.Scalar(parseFloat(str));
|
||||
if (frac && frac[frac.length - 1] === '0') node.minFractionDigits = frac.length;
|
||||
return node;
|
||||
},
|
||||
|
||||
stringify: resolveSeq.stringifyNumber
|
||||
};
|
||||
const core = failsafe.concat([nullObj, boolObj, octObj, intObj, hexObj, nanObj, expObj, floatObj]);
|
||||
|
||||
/* global BigInt */
|
||||
|
||||
const intIdentify$1 = value => typeof value === 'bigint' || Number.isInteger(value);
|
||||
|
||||
const stringifyJSON = ({
|
||||
value
|
||||
}) => JSON.stringify(value);
|
||||
|
||||
const json = [map, seq, {
|
||||
identify: value => typeof value === 'string',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:str',
|
||||
resolve: resolveSeq.resolveString,
|
||||
stringify: stringifyJSON
|
||||
}, {
|
||||
identify: value => value == null,
|
||||
createNode: (schema, value, ctx) => ctx.wrapScalars ? new resolveSeq.Scalar(null) : null,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:null',
|
||||
test: /^null$/,
|
||||
resolve: () => null,
|
||||
stringify: stringifyJSON
|
||||
}, {
|
||||
identify: value => typeof value === 'boolean',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:bool',
|
||||
test: /^true|false$/,
|
||||
resolve: str => str === 'true',
|
||||
stringify: stringifyJSON
|
||||
}, {
|
||||
identify: intIdentify$1,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
test: /^-?(?:0|[1-9][0-9]*)$/,
|
||||
resolve: str => resolveSeq.intOptions.asBigInt ? BigInt(str) : parseInt(str, 10),
|
||||
stringify: ({
|
||||
value
|
||||
}) => intIdentify$1(value) ? value.toString() : JSON.stringify(value)
|
||||
}, {
|
||||
identify: value => typeof value === 'number',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:float',
|
||||
test: /^-?(?:0|[1-9][0-9]*)(?:\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/,
|
||||
resolve: str => parseFloat(str),
|
||||
stringify: stringifyJSON
|
||||
}];
|
||||
|
||||
json.scalarFallback = str => {
|
||||
throw new SyntaxError(`Unresolved plain scalar ${JSON.stringify(str)}`);
|
||||
};
|
||||
|
||||
/* global BigInt */
|
||||
|
||||
const boolStringify = ({
|
||||
value
|
||||
}) => value ? resolveSeq.boolOptions.trueStr : resolveSeq.boolOptions.falseStr;
|
||||
|
||||
const intIdentify$2 = value => typeof value === 'bigint' || Number.isInteger(value);
|
||||
|
||||
function intResolve$1(sign, src, radix) {
|
||||
let str = src.replace(/_/g, '');
|
||||
|
||||
if (resolveSeq.intOptions.asBigInt) {
|
||||
switch (radix) {
|
||||
case 2:
|
||||
str = `0b${str}`;
|
||||
break;
|
||||
|
||||
case 8:
|
||||
str = `0o${str}`;
|
||||
break;
|
||||
|
||||
case 16:
|
||||
str = `0x${str}`;
|
||||
break;
|
||||
}
|
||||
|
||||
const n = BigInt(str);
|
||||
return sign === '-' ? BigInt(-1) * n : n;
|
||||
}
|
||||
|
||||
const n = parseInt(str, radix);
|
||||
return sign === '-' ? -1 * n : n;
|
||||
}
|
||||
|
||||
function intStringify$1(node, radix, prefix) {
|
||||
const {
|
||||
value
|
||||
} = node;
|
||||
|
||||
if (intIdentify$2(value)) {
|
||||
const str = value.toString(radix);
|
||||
return value < 0 ? '-' + prefix + str.substr(1) : prefix + str;
|
||||
}
|
||||
|
||||
return resolveSeq.stringifyNumber(node);
|
||||
}
|
||||
|
||||
const yaml11 = failsafe.concat([{
|
||||
identify: value => value == null,
|
||||
createNode: (schema, value, ctx) => ctx.wrapScalars ? new resolveSeq.Scalar(null) : null,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:null',
|
||||
test: /^(?:~|[Nn]ull|NULL)?$/,
|
||||
resolve: () => null,
|
||||
options: resolveSeq.nullOptions,
|
||||
stringify: () => resolveSeq.nullOptions.nullStr
|
||||
}, {
|
||||
identify: value => typeof value === 'boolean',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:bool',
|
||||
test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/,
|
||||
resolve: () => true,
|
||||
options: resolveSeq.boolOptions,
|
||||
stringify: boolStringify
|
||||
}, {
|
||||
identify: value => typeof value === 'boolean',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:bool',
|
||||
test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/i,
|
||||
resolve: () => false,
|
||||
options: resolveSeq.boolOptions,
|
||||
stringify: boolStringify
|
||||
}, {
|
||||
identify: intIdentify$2,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
format: 'BIN',
|
||||
test: /^([-+]?)0b([0-1_]+)$/,
|
||||
resolve: (str, sign, bin) => intResolve$1(sign, bin, 2),
|
||||
stringify: node => intStringify$1(node, 2, '0b')
|
||||
}, {
|
||||
identify: intIdentify$2,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
format: 'OCT',
|
||||
test: /^([-+]?)0([0-7_]+)$/,
|
||||
resolve: (str, sign, oct) => intResolve$1(sign, oct, 8),
|
||||
stringify: node => intStringify$1(node, 8, '0')
|
||||
}, {
|
||||
identify: intIdentify$2,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
test: /^([-+]?)([0-9][0-9_]*)$/,
|
||||
resolve: (str, sign, abs) => intResolve$1(sign, abs, 10),
|
||||
stringify: resolveSeq.stringifyNumber
|
||||
}, {
|
||||
identify: intIdentify$2,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
format: 'HEX',
|
||||
test: /^([-+]?)0x([0-9a-fA-F_]+)$/,
|
||||
resolve: (str, sign, hex) => intResolve$1(sign, hex, 16),
|
||||
stringify: node => intStringify$1(node, 16, '0x')
|
||||
}, {
|
||||
identify: value => typeof value === 'number',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:float',
|
||||
test: /^(?:[-+]?\.inf|(\.nan))$/i,
|
||||
resolve: (str, nan) => nan ? NaN : str[0] === '-' ? Number.NEGATIVE_INFINITY : Number.POSITIVE_INFINITY,
|
||||
stringify: resolveSeq.stringifyNumber
|
||||
}, {
|
||||
identify: value => typeof value === 'number',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:float',
|
||||
format: 'EXP',
|
||||
test: /^[-+]?([0-9][0-9_]*)?(\.[0-9_]*)?[eE][-+]?[0-9]+$/,
|
||||
resolve: str => parseFloat(str.replace(/_/g, '')),
|
||||
stringify: ({
|
||||
value
|
||||
}) => Number(value).toExponential()
|
||||
}, {
|
||||
identify: value => typeof value === 'number',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:float',
|
||||
test: /^[-+]?(?:[0-9][0-9_]*)?\.([0-9_]*)$/,
|
||||
|
||||
resolve(str, frac) {
|
||||
const node = new resolveSeq.Scalar(parseFloat(str.replace(/_/g, '')));
|
||||
|
||||
if (frac) {
|
||||
const f = frac.replace(/_/g, '');
|
||||
if (f[f.length - 1] === '0') node.minFractionDigits = f.length;
|
||||
}
|
||||
|
||||
return node;
|
||||
},
|
||||
|
||||
stringify: resolveSeq.stringifyNumber
|
||||
}], warnings.binary, warnings.omap, warnings.pairs, warnings.set, warnings.intTime, warnings.floatTime, warnings.timestamp);
|
||||
|
||||
const schemas = {
|
||||
core,
|
||||
failsafe,
|
||||
json,
|
||||
yaml11
|
||||
};
|
||||
const tags = {
|
||||
binary: warnings.binary,
|
||||
bool: boolObj,
|
||||
float: floatObj,
|
||||
floatExp: expObj,
|
||||
floatNaN: nanObj,
|
||||
floatTime: warnings.floatTime,
|
||||
int: intObj,
|
||||
intHex: hexObj,
|
||||
intOct: octObj,
|
||||
intTime: warnings.intTime,
|
||||
map,
|
||||
null: nullObj,
|
||||
omap: warnings.omap,
|
||||
pairs: warnings.pairs,
|
||||
seq,
|
||||
set: warnings.set,
|
||||
timestamp: warnings.timestamp
|
||||
};
|
||||
|
||||
function findTagObject(value, tagName, tags) {
|
||||
if (tagName) {
|
||||
const match = tags.filter(t => t.tag === tagName);
|
||||
const tagObj = match.find(t => !t.format) || match[0];
|
||||
if (!tagObj) throw new Error(`Tag ${tagName} not found`);
|
||||
return tagObj;
|
||||
} // TODO: deprecate/remove class check
|
||||
|
||||
|
||||
return tags.find(t => (t.identify && t.identify(value) || t.class && value instanceof t.class) && !t.format);
|
||||
}
|
||||
|
||||
function createNode(value, tagName, ctx) {
|
||||
if (value instanceof resolveSeq.Node) return value;
|
||||
const {
|
||||
defaultPrefix,
|
||||
onTagObj,
|
||||
prevObjects,
|
||||
schema,
|
||||
wrapScalars
|
||||
} = ctx;
|
||||
if (tagName && tagName.startsWith('!!')) tagName = defaultPrefix + tagName.slice(2);
|
||||
let tagObj = findTagObject(value, tagName, schema.tags);
|
||||
|
||||
if (!tagObj) {
|
||||
if (typeof value.toJSON === 'function') value = value.toJSON();
|
||||
if (typeof value !== 'object') return wrapScalars ? new resolveSeq.Scalar(value) : value;
|
||||
tagObj = value instanceof Map ? map : value[Symbol.iterator] ? seq : map;
|
||||
}
|
||||
|
||||
if (onTagObj) {
|
||||
onTagObj(tagObj);
|
||||
delete ctx.onTagObj;
|
||||
} // Detect duplicate references to the same object & use Alias nodes for all
|
||||
// after first. The `obj` wrapper allows for circular references to resolve.
|
||||
|
||||
|
||||
const obj = {};
|
||||
|
||||
if (value && typeof value === 'object' && prevObjects) {
|
||||
const prev = prevObjects.get(value);
|
||||
|
||||
if (prev) {
|
||||
const alias = new resolveSeq.Alias(prev); // leaves source dirty; must be cleaned by caller
|
||||
|
||||
ctx.aliasNodes.push(alias); // defined along with prevObjects
|
||||
|
||||
return alias;
|
||||
}
|
||||
|
||||
obj.value = value;
|
||||
prevObjects.set(value, obj);
|
||||
}
|
||||
|
||||
obj.node = tagObj.createNode ? tagObj.createNode(ctx.schema, value, ctx) : wrapScalars ? new resolveSeq.Scalar(value) : value;
|
||||
if (tagName && obj.node instanceof resolveSeq.Node) obj.node.tag = tagName;
|
||||
return obj.node;
|
||||
}
|
||||
|
||||
function getSchemaTags(schemas, knownTags, customTags, schemaId) {
|
||||
let tags = schemas[schemaId.replace(/\W/g, '')]; // 'yaml-1.1' -> 'yaml11'
|
||||
|
||||
if (!tags) {
|
||||
const keys = Object.keys(schemas).map(key => JSON.stringify(key)).join(', ');
|
||||
throw new Error(`Unknown schema "${schemaId}"; use one of ${keys}`);
|
||||
}
|
||||
|
||||
if (Array.isArray(customTags)) {
|
||||
for (const tag of customTags) tags = tags.concat(tag);
|
||||
} else if (typeof customTags === 'function') {
|
||||
tags = customTags(tags.slice());
|
||||
}
|
||||
|
||||
for (let i = 0; i < tags.length; ++i) {
|
||||
const tag = tags[i];
|
||||
|
||||
if (typeof tag === 'string') {
|
||||
const tagObj = knownTags[tag];
|
||||
|
||||
if (!tagObj) {
|
||||
const keys = Object.keys(knownTags).map(key => JSON.stringify(key)).join(', ');
|
||||
throw new Error(`Unknown custom tag "${tag}"; use one of ${keys}`);
|
||||
}
|
||||
|
||||
tags[i] = tagObj;
|
||||
}
|
||||
}
|
||||
|
||||
return tags;
|
||||
}
|
||||
|
||||
const sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0;
|
||||
|
||||
class Schema {
|
||||
// TODO: remove in v2
|
||||
// TODO: remove in v2
|
||||
constructor({
|
||||
customTags,
|
||||
merge,
|
||||
schema,
|
||||
sortMapEntries,
|
||||
tags: deprecatedCustomTags
|
||||
}) {
|
||||
this.merge = !!merge;
|
||||
this.name = schema;
|
||||
this.sortMapEntries = sortMapEntries === true ? sortMapEntriesByKey : sortMapEntries || null;
|
||||
if (!customTags && deprecatedCustomTags) warnings.warnOptionDeprecation('tags', 'customTags');
|
||||
this.tags = getSchemaTags(schemas, tags, customTags || deprecatedCustomTags, schema);
|
||||
}
|
||||
|
||||
createNode(value, wrapScalars, tagName, ctx) {
|
||||
const baseCtx = {
|
||||
defaultPrefix: Schema.defaultPrefix,
|
||||
schema: this,
|
||||
wrapScalars
|
||||
};
|
||||
const createCtx = ctx ? Object.assign(ctx, baseCtx) : baseCtx;
|
||||
return createNode(value, tagName, createCtx);
|
||||
}
|
||||
|
||||
createPair(key, value, ctx) {
|
||||
if (!ctx) ctx = {
|
||||
wrapScalars: true
|
||||
};
|
||||
const k = this.createNode(key, ctx.wrapScalars, null, ctx);
|
||||
const v = this.createNode(value, ctx.wrapScalars, null, ctx);
|
||||
return new resolveSeq.Pair(k, v);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
PlainValue._defineProperty(Schema, "defaultPrefix", PlainValue.defaultTagPrefix);
|
||||
|
||||
PlainValue._defineProperty(Schema, "defaultTags", PlainValue.defaultTags);
|
||||
|
||||
exports.Schema = Schema;
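// Usage sketch (not part of the vendored file): how the Schema class above is
// typically driven, assuming the public 'yaml/types' entry point of yaml@1.10,
// which re-exports Schema (see types.js later in this diff).
const { Schema } = require('yaml/types')

// Options mirror the constructor destructuring above:
// { customTags, merge, schema, sortMapEntries, tags }.
const schema = new Schema({ schema: 'core', merge: false, sortMapEntries: true })

// createNode(value, wrapScalars, tagName) resolves a tag via findTagObject()
// and wraps plain JS data as YAMLMap / YAMLSeq / Scalar nodes.
const node = schema.createNode({ hello: ['world', 42] }, true)
console.log(node.items.length) // 1 Pair for the single map entry

// createPair(key, value) wraps both sides as nodes and returns a Pair.
const pair = schema.createPair('greeting', 'hi')
console.log(pair.key.value, pair.value.value) // greeting hi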
|
79
node_modules/yaml/dist/index.js
generated
vendored
Normal file
@@ -0,0 +1,79 @@
'use strict';

var PlainValue = require('./PlainValue-ec8e588e.js');
var parseCst = require('./parse-cst.js');
require('./resolveSeq-4a68b39b.js');
var Document$1 = require('./Document-2cf6b08c.js');
var Schema = require('./Schema-42e9705c.js');
var warnings = require('./warnings-39684f17.js');

function createNode(value, wrapScalars = true, tag) {
  if (tag === undefined && typeof wrapScalars === 'string') {
    tag = wrapScalars;
    wrapScalars = true;
  }

  const options = Object.assign({}, Document$1.Document.defaults[Document$1.defaultOptions.version], Document$1.defaultOptions);
  const schema = new Schema.Schema(options);
  return schema.createNode(value, wrapScalars, tag);
}

class Document extends Document$1.Document {
  constructor(options) {
    super(Object.assign({}, Document$1.defaultOptions, options));
  }

}

function parseAllDocuments(src, options) {
  const stream = [];
  let prev;

  for (const cstDoc of parseCst.parse(src)) {
    const doc = new Document(options);
    doc.parse(cstDoc, prev);
    stream.push(doc);
    prev = doc;
  }

  return stream;
}

function parseDocument(src, options) {
  const cst = parseCst.parse(src);
  const doc = new Document(options).parse(cst[0]);

  if (cst.length > 1) {
    const errMsg = 'Source contains multiple documents; please use YAML.parseAllDocuments()';
    doc.errors.unshift(new PlainValue.YAMLSemanticError(cst[1], errMsg));
  }

  return doc;
}

function parse(src, options) {
  const doc = parseDocument(src, options);
  doc.warnings.forEach(warning => warnings.warn(warning));
  if (doc.errors.length > 0) throw doc.errors[0];
  return doc.toJSON();
}

function stringify(value, options) {
  const doc = new Document(options);
  doc.contents = value;
  return String(doc);
}

const YAML = {
  createNode,
  defaultOptions: Document$1.defaultOptions,
  Document,
  parse,
  parseAllDocuments,
  parseCST: parseCst.parse,
  parseDocument,
  scalarOptions: Document$1.scalarOptions,
  stringify
};

exports.YAML = YAML;
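// Usage sketch (not part of the vendored file): the YAML object assembled
// above is what the package's top-level index.js re-exports, assuming yaml@1.10.
const YAML = require('yaml')

// parse(): one document in, plain JS out; throws the first parse error.
const config = YAML.parse('name: demo\nitems:\n  - 1\n  - 2\n')
console.log(config.name, config.items.length) // demo 2

// parseDocument() / parseAllDocuments() keep the Document AST, including
// comments, anchors, errors and warnings.
const doc = YAML.parseDocument('# a comment\nfoo: bar\n')
console.log(doc.errors.length, doc.toJSON().foo) // 0 bar

// stringify(): plain JS in, YAML text out (always newline-terminated).
process.stdout.write(YAML.stringify({ name: 'demo', items: [1, 2] }))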
16
node_modules/yaml/dist/legacy-exports.js
generated
vendored
Normal file
@@ -0,0 +1,16 @@
'use strict';

require('./PlainValue-ec8e588e.js');
require('./resolveSeq-4a68b39b.js');
var warnings = require('./warnings-39684f17.js');



exports.binary = warnings.binary;
exports.floatTime = warnings.floatTime;
exports.intTime = warnings.intTime;
exports.omap = warnings.omap;
exports.pairs = warnings.pairs;
exports.set = warnings.set;
exports.timestamp = warnings.timestamp;
exports.warnFileDeprecation = warnings.warnFileDeprecation;
1747
node_modules/yaml/dist/parse-cst.js
generated
vendored
Normal file
File diff suppressed because it is too large
2115
node_modules/yaml/dist/resolveSeq-4a68b39b.js
generated
vendored
Normal file
File diff suppressed because it is too large
162
node_modules/yaml/dist/test-events.js
generated
vendored
Normal file
|
@ -0,0 +1,162 @@
|
|||
'use strict';
|
||||
|
||||
require('./PlainValue-ec8e588e.js');
|
||||
var parseCst = require('./parse-cst.js');
|
||||
require('./resolveSeq-4a68b39b.js');
|
||||
var Document$1 = require('./Document-2cf6b08c.js');
|
||||
require('./Schema-42e9705c.js');
|
||||
require('./warnings-39684f17.js');
|
||||
|
||||
function testEvents(src, options) {
|
||||
const opt = Object.assign({
|
||||
keepCstNodes: true,
|
||||
keepNodeTypes: true,
|
||||
version: '1.2'
|
||||
}, options);
|
||||
const docs = parseCst.parse(src).map(cstDoc => new Document$1.Document(opt).parse(cstDoc));
|
||||
const errDoc = docs.find(doc => doc.errors.length > 0);
|
||||
const error = errDoc ? errDoc.errors[0].message : null;
|
||||
const events = ['+STR'];
|
||||
|
||||
try {
|
||||
for (let i = 0; i < docs.length; ++i) {
|
||||
const doc = docs[i];
|
||||
let root = doc.contents;
|
||||
if (Array.isArray(root)) root = root[0];
|
||||
const [rootStart, rootEnd] = doc.range || [0, 0];
|
||||
let e = doc.errors[0] && doc.errors[0].source;
|
||||
if (e && e.type === 'SEQ_ITEM') e = e.node;
|
||||
if (e && (e.type === 'DOCUMENT' || e.range.start < rootStart)) throw new Error();
|
||||
let docStart = '+DOC';
|
||||
const pre = src.slice(0, rootStart);
|
||||
const explicitDoc = /---\s*$/.test(pre);
|
||||
if (explicitDoc) docStart += ' ---';else if (!doc.contents) continue;
|
||||
events.push(docStart);
|
||||
addEvents(events, doc, e, root);
|
||||
if (doc.contents && doc.contents.length > 1) throw new Error();
|
||||
let docEnd = '-DOC';
|
||||
|
||||
if (rootEnd) {
|
||||
const post = src.slice(rootEnd);
|
||||
if (/^\.\.\./.test(post)) docEnd += ' ...';
|
||||
}
|
||||
|
||||
events.push(docEnd);
|
||||
}
|
||||
} catch (e) {
|
||||
return {
|
||||
events,
|
||||
error: error || e
|
||||
};
|
||||
}
|
||||
|
||||
events.push('-STR');
|
||||
return {
|
||||
events,
|
||||
error
|
||||
};
|
||||
}
|
||||
|
||||
function addEvents(events, doc, e, node) {
|
||||
if (!node) {
|
||||
events.push('=VAL :');
|
||||
return;
|
||||
}
|
||||
|
||||
if (e && node.cstNode === e) throw new Error();
|
||||
let props = '';
|
||||
let anchor = doc.anchors.getName(node);
|
||||
|
||||
if (anchor) {
|
||||
if (/\d$/.test(anchor)) {
|
||||
const alt = anchor.replace(/\d$/, '');
|
||||
if (doc.anchors.getNode(alt)) anchor = alt;
|
||||
}
|
||||
|
||||
props = ` &${anchor}`;
|
||||
}
|
||||
|
||||
if (node.cstNode && node.cstNode.tag) {
|
||||
const {
|
||||
handle,
|
||||
suffix
|
||||
} = node.cstNode.tag;
|
||||
props += handle === '!' && !suffix ? ' <!>' : ` <${node.tag}>`;
|
||||
}
|
||||
|
||||
let scalar = null;
|
||||
|
||||
switch (node.type) {
|
||||
case 'ALIAS':
|
||||
{
|
||||
let alias = doc.anchors.getName(node.source);
|
||||
|
||||
if (/\d$/.test(alias)) {
|
||||
const alt = alias.replace(/\d$/, '');
|
||||
if (doc.anchors.getNode(alt)) alias = alt;
|
||||
}
|
||||
|
||||
events.push(`=ALI${props} *${alias}`);
|
||||
}
|
||||
break;
|
||||
|
||||
case 'BLOCK_FOLDED':
|
||||
scalar = '>';
|
||||
break;
|
||||
|
||||
case 'BLOCK_LITERAL':
|
||||
scalar = '|';
|
||||
break;
|
||||
|
||||
case 'PLAIN':
|
||||
scalar = ':';
|
||||
break;
|
||||
|
||||
case 'QUOTE_DOUBLE':
|
||||
scalar = '"';
|
||||
break;
|
||||
|
||||
case 'QUOTE_SINGLE':
|
||||
scalar = "'";
|
||||
break;
|
||||
|
||||
case 'PAIR':
|
||||
events.push(`+MAP${props}`);
|
||||
addEvents(events, doc, e, node.key);
|
||||
addEvents(events, doc, e, node.value);
|
||||
events.push('-MAP');
|
||||
break;
|
||||
|
||||
case 'FLOW_SEQ':
|
||||
case 'SEQ':
|
||||
events.push(`+SEQ${props}`);
|
||||
node.items.forEach(item => {
|
||||
addEvents(events, doc, e, item);
|
||||
});
|
||||
events.push('-SEQ');
|
||||
break;
|
||||
|
||||
case 'FLOW_MAP':
|
||||
case 'MAP':
|
||||
events.push(`+MAP${props}`);
|
||||
node.items.forEach(({
|
||||
key,
|
||||
value
|
||||
}) => {
|
||||
addEvents(events, doc, e, key);
|
||||
addEvents(events, doc, e, value);
|
||||
});
|
||||
events.push('-MAP');
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new Error(`Unexpected node type ${node.type}`);
|
||||
}
|
||||
|
||||
if (scalar) {
|
||||
const value = node.cstNode.strValue.replace(/\\/g, '\\\\').replace(/\0/g, '\\0').replace(/\x07/g, '\\a').replace(/\x08/g, '\\b').replace(/\t/g, '\\t').replace(/\n/g, '\\n').replace(/\v/g, '\\v').replace(/\f/g, '\\f').replace(/\r/g, '\\r').replace(/\x1b/g, '\\e');
|
||||
events.push(`=VAL${props} ${scalar}${value}`);
|
||||
}
|
||||
}
|
||||
|
||||
exports.testEvents = testEvents;
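// Sketch of the event stream produced by testEvents above; the dist path is
// internal (used for yaml-test-suite runs), so treat this as illustrative only.
const { testEvents } = require('yaml/dist/test-events')

const { events, error } = testEvents('a: 1\n')
console.log(error) // null for valid input
// For this input the stream is expected to look like:
// ['+STR', '+DOC', '+MAP', '=VAL :a', '=VAL :1', '-MAP', '-DOC', '-STR']
console.log(events)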
|
23
node_modules/yaml/dist/types.js
generated
vendored
Normal file
@@ -0,0 +1,23 @@
'use strict';

require('./PlainValue-ec8e588e.js');
var resolveSeq = require('./resolveSeq-4a68b39b.js');
var Schema = require('./Schema-42e9705c.js');
require('./warnings-39684f17.js');



exports.Alias = resolveSeq.Alias;
exports.Collection = resolveSeq.Collection;
exports.Merge = resolveSeq.Merge;
exports.Node = resolveSeq.Node;
exports.Pair = resolveSeq.Pair;
exports.Scalar = resolveSeq.Scalar;
exports.YAMLMap = resolveSeq.YAMLMap;
exports.YAMLSeq = resolveSeq.YAMLSeq;
exports.binaryOptions = resolveSeq.binaryOptions;
exports.boolOptions = resolveSeq.boolOptions;
exports.intOptions = resolveSeq.intOptions;
exports.nullOptions = resolveSeq.nullOptions;
exports.strOptions = resolveSeq.strOptions;
exports.Schema = Schema.Schema;
19
node_modules/yaml/dist/util.js
generated
vendored
Normal file
@@ -0,0 +1,19 @@
'use strict';

var PlainValue = require('./PlainValue-ec8e588e.js');
var resolveSeq = require('./resolveSeq-4a68b39b.js');



exports.Type = PlainValue.Type;
exports.YAMLError = PlainValue.YAMLError;
exports.YAMLReferenceError = PlainValue.YAMLReferenceError;
exports.YAMLSemanticError = PlainValue.YAMLSemanticError;
exports.YAMLSyntaxError = PlainValue.YAMLSyntaxError;
exports.YAMLWarning = PlainValue.YAMLWarning;
exports.findPair = resolveSeq.findPair;
exports.parseMap = resolveSeq.resolveMap;
exports.parseSeq = resolveSeq.resolveSeq;
exports.stringifyNumber = resolveSeq.stringifyNumber;
exports.stringifyString = resolveSeq.stringifyString;
exports.toJSON = resolveSeq.toJSON;
416
node_modules/yaml/dist/warnings-39684f17.js
generated
vendored
Normal file
|
@ -0,0 +1,416 @@
|
|||
'use strict';
|
||||
|
||||
var PlainValue = require('./PlainValue-ec8e588e.js');
|
||||
var resolveSeq = require('./resolveSeq-4a68b39b.js');
|
||||
|
||||
/* global atob, btoa, Buffer */
|
||||
const binary = {
|
||||
identify: value => value instanceof Uint8Array,
|
||||
// Buffer inherits from Uint8Array
|
||||
default: false,
|
||||
tag: 'tag:yaml.org,2002:binary',
|
||||
|
||||
/**
|
||||
* Returns a Buffer in node and an Uint8Array in browsers
|
||||
*
|
||||
* To use the resulting buffer as an image, you'll want to do something like:
|
||||
*
|
||||
* const blob = new Blob([buffer], { type: 'image/jpeg' })
|
||||
* document.querySelector('#photo').src = URL.createObjectURL(blob)
|
||||
*/
|
||||
resolve: (doc, node) => {
|
||||
const src = resolveSeq.resolveString(doc, node);
|
||||
|
||||
if (typeof Buffer === 'function') {
|
||||
return Buffer.from(src, 'base64');
|
||||
} else if (typeof atob === 'function') {
|
||||
// On IE 11, atob() can't handle newlines
|
||||
const str = atob(src.replace(/[\n\r]/g, ''));
|
||||
const buffer = new Uint8Array(str.length);
|
||||
|
||||
for (let i = 0; i < str.length; ++i) buffer[i] = str.charCodeAt(i);
|
||||
|
||||
return buffer;
|
||||
} else {
|
||||
const msg = 'This environment does not support reading binary tags; either Buffer or atob is required';
|
||||
doc.errors.push(new PlainValue.YAMLReferenceError(node, msg));
|
||||
return null;
|
||||
}
|
||||
},
|
||||
options: resolveSeq.binaryOptions,
|
||||
stringify: ({
|
||||
comment,
|
||||
type,
|
||||
value
|
||||
}, ctx, onComment, onChompKeep) => {
|
||||
let src;
|
||||
|
||||
if (typeof Buffer === 'function') {
|
||||
src = value instanceof Buffer ? value.toString('base64') : Buffer.from(value.buffer).toString('base64');
|
||||
} else if (typeof btoa === 'function') {
|
||||
let s = '';
|
||||
|
||||
for (let i = 0; i < value.length; ++i) s += String.fromCharCode(value[i]);
|
||||
|
||||
src = btoa(s);
|
||||
} else {
|
||||
throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required');
|
||||
}
|
||||
|
||||
if (!type) type = resolveSeq.binaryOptions.defaultType;
|
||||
|
||||
if (type === PlainValue.Type.QUOTE_DOUBLE) {
|
||||
value = src;
|
||||
} else {
|
||||
const {
|
||||
lineWidth
|
||||
} = resolveSeq.binaryOptions;
|
||||
const n = Math.ceil(src.length / lineWidth);
|
||||
const lines = new Array(n);
|
||||
|
||||
for (let i = 0, o = 0; i < n; ++i, o += lineWidth) {
|
||||
lines[i] = src.substr(o, lineWidth);
|
||||
}
|
||||
|
||||
value = lines.join(type === PlainValue.Type.BLOCK_LITERAL ? '\n' : ' ');
|
||||
}
|
||||
|
||||
return resolveSeq.stringifyString({
|
||||
comment,
|
||||
type,
|
||||
value
|
||||
}, ctx, onComment, onChompKeep);
|
||||
}
|
||||
};
|
||||
|
||||
function parsePairs(doc, cst) {
|
||||
const seq = resolveSeq.resolveSeq(doc, cst);
|
||||
|
||||
for (let i = 0; i < seq.items.length; ++i) {
|
||||
let item = seq.items[i];
|
||||
if (item instanceof resolveSeq.Pair) continue;else if (item instanceof resolveSeq.YAMLMap) {
|
||||
if (item.items.length > 1) {
|
||||
const msg = 'Each pair must have its own sequence indicator';
|
||||
throw new PlainValue.YAMLSemanticError(cst, msg);
|
||||
}
|
||||
|
||||
const pair = item.items[0] || new resolveSeq.Pair();
|
||||
if (item.commentBefore) pair.commentBefore = pair.commentBefore ? `${item.commentBefore}\n${pair.commentBefore}` : item.commentBefore;
|
||||
if (item.comment) pair.comment = pair.comment ? `${item.comment}\n${pair.comment}` : item.comment;
|
||||
item = pair;
|
||||
}
|
||||
seq.items[i] = item instanceof resolveSeq.Pair ? item : new resolveSeq.Pair(item);
|
||||
}
|
||||
|
||||
return seq;
|
||||
}
|
||||
function createPairs(schema, iterable, ctx) {
|
||||
const pairs = new resolveSeq.YAMLSeq(schema);
|
||||
pairs.tag = 'tag:yaml.org,2002:pairs';
|
||||
|
||||
for (const it of iterable) {
|
||||
let key, value;
|
||||
|
||||
if (Array.isArray(it)) {
|
||||
if (it.length === 2) {
|
||||
key = it[0];
|
||||
value = it[1];
|
||||
} else throw new TypeError(`Expected [key, value] tuple: ${it}`);
|
||||
} else if (it && it instanceof Object) {
|
||||
const keys = Object.keys(it);
|
||||
|
||||
if (keys.length === 1) {
|
||||
key = keys[0];
|
||||
value = it[key];
|
||||
} else throw new TypeError(`Expected { key: value } tuple: ${it}`);
|
||||
} else {
|
||||
key = it;
|
||||
}
|
||||
|
||||
const pair = schema.createPair(key, value, ctx);
|
||||
pairs.items.push(pair);
|
||||
}
|
||||
|
||||
return pairs;
|
||||
}
|
||||
const pairs = {
|
||||
default: false,
|
||||
tag: 'tag:yaml.org,2002:pairs',
|
||||
resolve: parsePairs,
|
||||
createNode: createPairs
|
||||
};
|
||||
|
||||
class YAMLOMap extends resolveSeq.YAMLSeq {
|
||||
constructor() {
|
||||
super();
|
||||
|
||||
PlainValue._defineProperty(this, "add", resolveSeq.YAMLMap.prototype.add.bind(this));
|
||||
|
||||
PlainValue._defineProperty(this, "delete", resolveSeq.YAMLMap.prototype.delete.bind(this));
|
||||
|
||||
PlainValue._defineProperty(this, "get", resolveSeq.YAMLMap.prototype.get.bind(this));
|
||||
|
||||
PlainValue._defineProperty(this, "has", resolveSeq.YAMLMap.prototype.has.bind(this));
|
||||
|
||||
PlainValue._defineProperty(this, "set", resolveSeq.YAMLMap.prototype.set.bind(this));
|
||||
|
||||
this.tag = YAMLOMap.tag;
|
||||
}
|
||||
|
||||
toJSON(_, ctx) {
|
||||
const map = new Map();
|
||||
if (ctx && ctx.onCreate) ctx.onCreate(map);
|
||||
|
||||
for (const pair of this.items) {
|
||||
let key, value;
|
||||
|
||||
if (pair instanceof resolveSeq.Pair) {
|
||||
key = resolveSeq.toJSON(pair.key, '', ctx);
|
||||
value = resolveSeq.toJSON(pair.value, key, ctx);
|
||||
} else {
|
||||
key = resolveSeq.toJSON(pair, '', ctx);
|
||||
}
|
||||
|
||||
if (map.has(key)) throw new Error('Ordered maps must not include duplicate keys');
|
||||
map.set(key, value);
|
||||
}
|
||||
|
||||
return map;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
PlainValue._defineProperty(YAMLOMap, "tag", 'tag:yaml.org,2002:omap');
|
||||
|
||||
function parseOMap(doc, cst) {
|
||||
const pairs = parsePairs(doc, cst);
|
||||
const seenKeys = [];
|
||||
|
||||
for (const {
|
||||
key
|
||||
} of pairs.items) {
|
||||
if (key instanceof resolveSeq.Scalar) {
|
||||
if (seenKeys.includes(key.value)) {
|
||||
const msg = 'Ordered maps must not include duplicate keys';
|
||||
throw new PlainValue.YAMLSemanticError(cst, msg);
|
||||
} else {
|
||||
seenKeys.push(key.value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return Object.assign(new YAMLOMap(), pairs);
|
||||
}
|
||||
|
||||
function createOMap(schema, iterable, ctx) {
|
||||
const pairs = createPairs(schema, iterable, ctx);
|
||||
const omap = new YAMLOMap();
|
||||
omap.items = pairs.items;
|
||||
return omap;
|
||||
}
|
||||
|
||||
const omap = {
|
||||
identify: value => value instanceof Map,
|
||||
nodeClass: YAMLOMap,
|
||||
default: false,
|
||||
tag: 'tag:yaml.org,2002:omap',
|
||||
resolve: parseOMap,
|
||||
createNode: createOMap
|
||||
};
|
||||
|
||||
class YAMLSet extends resolveSeq.YAMLMap {
|
||||
constructor() {
|
||||
super();
|
||||
this.tag = YAMLSet.tag;
|
||||
}
|
||||
|
||||
add(key) {
|
||||
const pair = key instanceof resolveSeq.Pair ? key : new resolveSeq.Pair(key);
|
||||
const prev = resolveSeq.findPair(this.items, pair.key);
|
||||
if (!prev) this.items.push(pair);
|
||||
}
|
||||
|
||||
get(key, keepPair) {
|
||||
const pair = resolveSeq.findPair(this.items, key);
|
||||
return !keepPair && pair instanceof resolveSeq.Pair ? pair.key instanceof resolveSeq.Scalar ? pair.key.value : pair.key : pair;
|
||||
}
|
||||
|
||||
set(key, value) {
|
||||
if (typeof value !== 'boolean') throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`);
|
||||
const prev = resolveSeq.findPair(this.items, key);
|
||||
|
||||
if (prev && !value) {
|
||||
this.items.splice(this.items.indexOf(prev), 1);
|
||||
} else if (!prev && value) {
|
||||
this.items.push(new resolveSeq.Pair(key));
|
||||
}
|
||||
}
|
||||
|
||||
toJSON(_, ctx) {
|
||||
return super.toJSON(_, ctx, Set);
|
||||
}
|
||||
|
||||
toString(ctx, onComment, onChompKeep) {
|
||||
if (!ctx) return JSON.stringify(this);
|
||||
if (this.hasAllNullValues()) return super.toString(ctx, onComment, onChompKeep);else throw new Error('Set items must all have null values');
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
PlainValue._defineProperty(YAMLSet, "tag", 'tag:yaml.org,2002:set');
|
||||
|
||||
function parseSet(doc, cst) {
|
||||
const map = resolveSeq.resolveMap(doc, cst);
|
||||
if (!map.hasAllNullValues()) throw new PlainValue.YAMLSemanticError(cst, 'Set items must all have null values');
|
||||
return Object.assign(new YAMLSet(), map);
|
||||
}
|
||||
|
||||
function createSet(schema, iterable, ctx) {
|
||||
const set = new YAMLSet();
|
||||
|
||||
for (const value of iterable) set.items.push(schema.createPair(value, null, ctx));
|
||||
|
||||
return set;
|
||||
}
|
||||
|
||||
const set = {
|
||||
identify: value => value instanceof Set,
|
||||
nodeClass: YAMLSet,
|
||||
default: false,
|
||||
tag: 'tag:yaml.org,2002:set',
|
||||
resolve: parseSet,
|
||||
createNode: createSet
|
||||
};
|
||||
|
||||
const parseSexagesimal = (sign, parts) => {
|
||||
const n = parts.split(':').reduce((n, p) => n * 60 + Number(p), 0);
|
||||
return sign === '-' ? -n : n;
|
||||
}; // hhhh:mm:ss.sss
|
||||
|
||||
|
||||
const stringifySexagesimal = ({
|
||||
value
|
||||
}) => {
|
||||
if (isNaN(value) || !isFinite(value)) return resolveSeq.stringifyNumber(value);
|
||||
let sign = '';
|
||||
|
||||
if (value < 0) {
|
||||
sign = '-';
|
||||
value = Math.abs(value);
|
||||
}
|
||||
|
||||
const parts = [value % 60]; // seconds, including ms
|
||||
|
||||
if (value < 60) {
|
||||
parts.unshift(0); // at least one : is required
|
||||
} else {
|
||||
value = Math.round((value - parts[0]) / 60);
|
||||
parts.unshift(value % 60); // minutes
|
||||
|
||||
if (value >= 60) {
|
||||
value = Math.round((value - parts[0]) / 60);
|
||||
parts.unshift(value); // hours
|
||||
}
|
||||
}
|
||||
|
||||
return sign + parts.map(n => n < 10 ? '0' + String(n) : String(n)).join(':').replace(/000000\d*$/, '') // % 60 may introduce error
|
||||
;
|
||||
};
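// Worked example for parseSexagesimal / stringifySexagesimal above:
//   parseSexagesimal('', '3:25:45')  -> ((0 * 60 + 3) * 60 + 25) * 60 + 45 = 12345
//   parseSexagesimal('-', '3:25:45') -> -12345
//   stringifySexagesimal({ value: 12345 }) -> '03:25:45' (each part zero-padded)
// Through the public API this is the YAML 1.1 TIME format, e.g.
// require('yaml').parse('190:20:30', { schema: 'yaml-1.1' }) is expected to
// give 190 * 3600 + 20 * 60 + 30 = 685230.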
|
||||
|
||||
const intTime = {
|
||||
identify: value => typeof value === 'number',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
format: 'TIME',
|
||||
test: /^([-+]?)([0-9][0-9_]*(?::[0-5]?[0-9])+)$/,
|
||||
resolve: (str, sign, parts) => parseSexagesimal(sign, parts.replace(/_/g, '')),
|
||||
stringify: stringifySexagesimal
|
||||
};
|
||||
const floatTime = {
|
||||
identify: value => typeof value === 'number',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:float',
|
||||
format: 'TIME',
|
||||
test: /^([-+]?)([0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*)$/,
|
||||
resolve: (str, sign, parts) => parseSexagesimal(sign, parts.replace(/_/g, '')),
|
||||
stringify: stringifySexagesimal
|
||||
};
|
||||
const timestamp = {
|
||||
identify: value => value instanceof Date,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:timestamp',
|
||||
// If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part
|
||||
// may be omitted altogether, resulting in a date format. In such a case, the time part is
|
||||
// assumed to be 00:00:00Z (start of day, UTC).
|
||||
test: RegExp('^(?:' + '([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd
|
||||
'(?:(?:t|T|[ \\t]+)' + // t | T | whitespace
|
||||
'([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)?
|
||||
'(?:[ \\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30
|
||||
')?' + ')$'),
|
||||
resolve: (str, year, month, day, hour, minute, second, millisec, tz) => {
|
||||
if (millisec) millisec = (millisec + '00').substr(1, 3);
|
||||
let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec || 0);
|
||||
|
||||
if (tz && tz !== 'Z') {
|
||||
let d = parseSexagesimal(tz[0], tz.slice(1));
|
||||
if (Math.abs(d) < 30) d *= 60;
|
||||
date -= 60000 * d;
|
||||
}
|
||||
|
||||
return new Date(date);
|
||||
},
|
||||
stringify: ({
|
||||
value
|
||||
}) => value.toISOString().replace(/((T00:00)?:00)?\.000Z$/, '')
|
||||
};
|
||||
|
||||
/* global console, process, YAML_SILENCE_DEPRECATION_WARNINGS, YAML_SILENCE_WARNINGS */
|
||||
function shouldWarn(deprecation) {
|
||||
const env = typeof process !== 'undefined' && process.env || {};
|
||||
|
||||
if (deprecation) {
|
||||
if (typeof YAML_SILENCE_DEPRECATION_WARNINGS !== 'undefined') return !YAML_SILENCE_DEPRECATION_WARNINGS;
|
||||
return !env.YAML_SILENCE_DEPRECATION_WARNINGS;
|
||||
}
|
||||
|
||||
if (typeof YAML_SILENCE_WARNINGS !== 'undefined') return !YAML_SILENCE_WARNINGS;
|
||||
return !env.YAML_SILENCE_WARNINGS;
|
||||
}
|
||||
|
||||
function warn(warning, type) {
|
||||
if (shouldWarn(false)) {
|
||||
const emit = typeof process !== 'undefined' && process.emitWarning; // This will throw in Jest if `warning` is an Error instance due to
|
||||
// https://github.com/facebook/jest/issues/2549
|
||||
|
||||
if (emit) emit(warning, type);else {
|
||||
// eslint-disable-next-line no-console
|
||||
console.warn(type ? `${type}: ${warning}` : warning);
|
||||
}
|
||||
}
|
||||
}
|
||||
function warnFileDeprecation(filename) {
|
||||
if (shouldWarn(true)) {
|
||||
const path = filename.replace(/.*yaml[/\\]/i, '').replace(/\.js$/, '').replace(/\\/g, '/');
|
||||
warn(`The endpoint 'yaml/${path}' will be removed in a future release.`, 'DeprecationWarning');
|
||||
}
|
||||
}
|
||||
const warned = {};
|
||||
function warnOptionDeprecation(name, alternative) {
|
||||
if (!warned[name] && shouldWarn(true)) {
|
||||
warned[name] = true;
|
||||
let msg = `The option '${name}' will be removed in a future release`;
|
||||
msg += alternative ? `, use '${alternative}' instead.` : '.';
|
||||
warn(msg, 'DeprecationWarning');
|
||||
}
|
||||
}
|
||||
|
||||
exports.binary = binary;
|
||||
exports.floatTime = floatTime;
|
||||
exports.intTime = intTime;
|
||||
exports.omap = omap;
|
||||
exports.pairs = pairs;
|
||||
exports.set = set;
|
||||
exports.timestamp = timestamp;
|
||||
exports.warn = warn;
|
||||
exports.warnFileDeprecation = warnFileDeprecation;
|
||||
exports.warnOptionDeprecation = warnOptionDeprecation;
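// Usage sketch for the YAML 1.1 tags exported above, exercised through the
// public API; assumes yaml@1.10 where { version: '1.1' } selects this schema.
const YAML = require('yaml')

// !!timestamp resolves to a JS Date (UTC when no time zone is given);
// here the -5 offset shifts the result to 2001-12-15T02:59:43.100Z.
const t = YAML.parse('2001-12-14 21:59:43.10 -5', { version: '1.1' })
console.log(t instanceof Date, t.toISOString())

// !!set becomes a JS Set; !!omap becomes a Map that preserves key order.
const s = YAML.parse('!!set\n? a\n? b\n', { version: '1.1' })
console.log(s instanceof Set, [...s]) // true [ 'a', 'b' ]

// !!binary is decoded to a Buffer in Node.js (Uint8Array in browsers).
const b = YAML.parse('!!binary "aGVsbG8="', { version: '1.1' })
console.log(b.toString()) // hello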
|
372
node_modules/yaml/index.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,372 @@
|
|||
import { CST } from './parse-cst'
|
||||
import {
|
||||
AST,
|
||||
Alias,
|
||||
Collection,
|
||||
Merge,
|
||||
Node,
|
||||
Scalar,
|
||||
Schema,
|
||||
YAMLMap,
|
||||
YAMLSeq
|
||||
} from './types'
|
||||
import { Type, YAMLError, YAMLWarning } from './util'
|
||||
|
||||
export { AST, CST }
|
||||
export { default as parseCST } from './parse-cst'
|
||||
|
||||
/**
|
||||
* `yaml` defines document-specific options in three places: as an argument of
|
||||
* parse, create and stringify calls, in the values of `YAML.defaultOptions`,
|
||||
* and in the version-dependent `YAML.Document.defaults` object. Values set in
|
||||
* `YAML.defaultOptions` override version-dependent defaults, and argument
|
||||
* options override both.
|
||||
*/
|
||||
export const defaultOptions: Options
|
||||
|
||||
export interface Options extends Schema.Options {
|
||||
/**
|
||||
* Default prefix for anchors.
|
||||
*
|
||||
* Default: `'a'`, resulting in anchors `a1`, `a2`, etc.
|
||||
*/
|
||||
anchorPrefix?: string
|
||||
/**
|
||||
* The number of spaces to use when indenting code.
|
||||
*
|
||||
* Default: `2`
|
||||
*/
|
||||
indent?: number
|
||||
/**
|
||||
* Whether block sequences should be indented.
|
||||
*
|
||||
* Default: `true`
|
||||
*/
|
||||
indentSeq?: boolean
|
||||
/**
|
||||
* Allow non-JSON JavaScript objects to remain in the `toJSON` output.
|
||||
* Relevant with the YAML 1.1 `!!timestamp` and `!!binary` tags as well as BigInts.
|
||||
*
|
||||
* Default: `true`
|
||||
*/
|
||||
keepBlobsInJSON?: boolean
|
||||
/**
|
||||
* Include references in the AST to each node's corresponding CST node.
|
||||
*
|
||||
* Default: `false`
|
||||
*/
|
||||
keepCstNodes?: boolean
|
||||
/**
|
||||
* Store the original node type when parsing documents.
|
||||
*
|
||||
* Default: `true`
|
||||
*/
|
||||
keepNodeTypes?: boolean
|
||||
/**
|
||||
* When outputting JS, use Map rather than Object to represent mappings.
|
||||
*
|
||||
* Default: `false`
|
||||
*/
|
||||
mapAsMap?: boolean
|
||||
/**
|
||||
* Prevent exponential entity expansion attacks by limiting data aliasing count;
|
||||
* set to `-1` to disable checks; `0` disallows all alias nodes.
|
||||
*
|
||||
* Default: `100`
|
||||
*/
|
||||
maxAliasCount?: number
|
||||
/**
|
||||
* Include line position & node type directly in errors; drop their verbose source and context.
|
||||
*
|
||||
* Default: `false`
|
||||
*/
|
||||
prettyErrors?: boolean
|
||||
/**
|
||||
* When stringifying, require keys to be scalars and to use implicit rather than explicit notation.
|
||||
*
|
||||
* Default: `false`
|
||||
*/
|
||||
simpleKeys?: boolean
|
||||
/**
|
||||
* The YAML version used by documents without a `%YAML` directive.
|
||||
*
|
||||
* Default: `"1.2"`
|
||||
*/
|
||||
version?: '1.0' | '1.1' | '1.2'
|
||||
}
|
||||
|
||||
/**
|
||||
* Some customization options are available to control the parsing and
|
||||
* stringification of scalars. Note that these values are used by all documents.
|
||||
*/
|
||||
export const scalarOptions: {
|
||||
binary: scalarOptions.Binary
|
||||
bool: scalarOptions.Bool
|
||||
int: scalarOptions.Int
|
||||
null: scalarOptions.Null
|
||||
str: scalarOptions.Str
|
||||
}
|
||||
export namespace scalarOptions {
|
||||
interface Binary {
|
||||
/**
|
||||
* The type of string literal used to stringify `!!binary` values.
|
||||
*
|
||||
* Default: `'BLOCK_LITERAL'`
|
||||
*/
|
||||
defaultType: Scalar.Type
|
||||
/**
|
||||
* Maximum line width for `!!binary`.
|
||||
*
|
||||
* Default: `76`
|
||||
*/
|
||||
lineWidth: number
|
||||
}
|
||||
|
||||
interface Bool {
|
||||
/**
|
||||
* String representation for `true`. With the core schema, use `'true' | 'True' | 'TRUE'`.
|
||||
*
|
||||
* Default: `'true'`
|
||||
*/
|
||||
trueStr: string
|
||||
/**
|
||||
* String representation for `false`. With the core schema, use `'false' | 'False' | 'FALSE'`.
|
||||
*
|
||||
* Default: `'false'`
|
||||
*/
|
||||
falseStr: string
|
||||
}
|
||||
|
||||
interface Int {
|
||||
/**
|
||||
* Whether integers should be parsed into BigInt values.
|
||||
*
|
||||
* Default: `false`
|
||||
*/
|
||||
asBigInt: false
|
||||
}
|
||||
|
||||
interface Null {
|
||||
/**
|
||||
* String representation for `null`. With the core schema, use `'null' | 'Null' | 'NULL' | '~' | ''`.
|
||||
*
|
||||
* Default: `'null'`
|
||||
*/
|
||||
nullStr: string
|
||||
}
|
||||
|
||||
interface Str {
|
||||
/**
|
||||
* The default type of string literal used to stringify values
|
||||
*
|
||||
* Default: `'PLAIN'`
|
||||
*/
|
||||
defaultType: Scalar.Type
|
||||
doubleQuoted: {
|
||||
/**
|
||||
* Whether to restrict double-quoted strings to use JSON-compatible syntax.
|
||||
*
|
||||
* Default: `false`
|
||||
*/
|
||||
jsonEncoding: boolean
|
||||
/**
|
||||
* Minimum length to use multiple lines to represent the value.
|
||||
*
|
||||
* Default: `40`
|
||||
*/
|
||||
minMultiLineLength: number
|
||||
}
|
||||
fold: {
|
||||
/**
|
||||
* Maximum line width (set to `0` to disable folding).
|
||||
*
|
||||
* Default: `80`
|
||||
*/
|
||||
lineWidth: number
|
||||
/**
|
||||
* Minimum width for highly-indented content.
|
||||
*
|
||||
* Default: `20`
|
||||
*/
|
||||
minContentWidth: number
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export class Document extends Collection {
|
||||
cstNode?: CST.Document
|
||||
constructor(options?: Options)
|
||||
tag: never
|
||||
directivesEndMarker?: boolean
|
||||
type: Type.DOCUMENT
|
||||
/**
|
||||
* Anchors associated with the document's nodes;
|
||||
* also provides alias & merge node creators.
|
||||
*/
|
||||
anchors: Document.Anchors
|
||||
/** The document contents. */
|
||||
contents: any
|
||||
/** Errors encountered during parsing. */
|
||||
errors: YAMLError[]
|
||||
/**
|
||||
* The schema used with the document. Use `setSchema()` to change or
|
||||
* initialise.
|
||||
*/
|
||||
schema?: Schema
|
||||
/**
|
||||
* Array of prefixes; each will have a string `handle` that
|
||||
* starts and ends with `!` and a string `prefix` that the handle will be replaced by.
|
||||
*/
|
||||
tagPrefixes: Document.TagPrefix[]
|
||||
/**
|
||||
* The parsed version of the source document;
|
||||
* if true-ish, stringified output will include a `%YAML` directive.
|
||||
*/
|
||||
version?: string
|
||||
/** Warnings encountered during parsing. */
|
||||
warnings: YAMLWarning[]
|
||||
/**
|
||||
* List the tags used in the document that are not in the default
|
||||
* `tag:yaml.org,2002:` namespace.
|
||||
*/
|
||||
listNonDefaultTags(): string[]
|
||||
/** Parse a CST into this document */
|
||||
parse(cst: CST.Document): this
|
||||
/**
|
||||
* When a document is created with `new YAML.Document()`, the schema object is
|
||||
* not set as it may be influenced by parsed directives; call this with no
|
||||
* arguments to set it manually, or with arguments to change the schema used
|
||||
* by the document.
|
||||
**/
|
||||
setSchema(
|
||||
id?: Options['version'] | Schema.Name,
|
||||
customTags?: (Schema.TagId | Schema.Tag)[]
|
||||
): void
|
||||
/** Set `handle` as a shorthand string for the `prefix` tag namespace. */
|
||||
setTagPrefix(handle: string, prefix: string): void
|
||||
/**
|
||||
* A plain JavaScript representation of the document `contents`.
|
||||
*
|
||||
* @param arg Used by `JSON.stringify` to indicate the array index or property
|
||||
* name. If its value is a `string` and the document `contents` has a scalar
|
||||
* value, the `keepBlobsInJSON` option has no effect.
|
||||
* @param onAnchor If defined, called with the resolved `value` and reference
|
||||
* `count` for each anchor in the document.
|
||||
* */
|
||||
toJSON(arg?: string, onAnchor?: (value: any, count: number) => void): any
|
||||
/** A YAML representation of the document. */
|
||||
toString(): string
|
||||
}
|
||||
|
||||
export namespace Document {
|
||||
interface Parsed extends Document {
|
||||
contents: Node | null
|
||||
/** The schema used with the document. */
|
||||
schema: Schema
|
||||
}
|
||||
|
||||
interface Anchors {
|
||||
/**
|
||||
* Create a new `Alias` node, adding the required anchor for `node`.
|
||||
* If `name` is empty, a new anchor name will be generated.
|
||||
*/
|
||||
createAlias(node: Node, name?: string): Alias
|
||||
/**
|
||||
* Create a new `Merge` node with the given source nodes.
|
||||
* Non-`Alias` sources will be automatically wrapped.
|
||||
*/
|
||||
createMergePair(...nodes: Node[]): Merge
|
||||
/** The anchor name associated with `node`, if set. */
|
||||
getName(node: Node): undefined | string
|
||||
/** List of all defined anchor names. */
|
||||
getNames(): string[]
|
||||
/** The node associated with the anchor `name`, if set. */
|
||||
getNode(name: string): undefined | Node
|
||||
/**
|
||||
* Find an available anchor name with the given `prefix` and a
|
||||
* numerical suffix.
|
||||
*/
|
||||
newName(prefix: string): string
|
||||
/**
|
||||
* Associate an anchor with `node`. If `name` is empty, a new name will be generated.
|
||||
* To remove an anchor, use `setAnchor(null, name)`.
|
||||
*/
|
||||
setAnchor(node: Node | null, name?: string): void | string
|
||||
}
|
||||
|
||||
interface TagPrefix {
|
||||
handle: string
|
||||
prefix: string
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Recursively turns objects into collections. Generic objects as well as `Map`
|
||||
* and its descendants become mappings, while arrays and other iterable objects
|
||||
* result in sequences.
|
||||
*
|
||||
* The primary purpose of this function is to enable attaching comments or other
|
||||
* metadata to a value, or to otherwise exert more fine-grained control over the
|
||||
* stringified output. To that end, you'll need to assign its return value to
|
||||
* the `contents` of a Document (or somewhere within said contents), as the
|
||||
* document's schema is required for YAML string output.
|
||||
*
|
||||
* @param wrapScalars If undefined or `true`, also wraps plain values in
|
||||
* `Scalar` objects; if `false` and `value` is not an object, it will be
|
||||
* returned directly.
|
||||
* @param tag Use to specify the collection type, e.g. `"!!omap"`. Note that
|
||||
* this requires the corresponding tag to be available based on the default
|
||||
* options. To use a specific document's schema, use `doc.schema.createNode`.
|
||||
*/
|
||||
export function createNode(
|
||||
value: any,
|
||||
wrapScalars?: true,
|
||||
tag?: string
|
||||
): YAMLMap | YAMLSeq | Scalar
|
||||
|
||||
/**
|
||||
* YAML.createNode recursively turns objects into Map and arrays to Seq collections.
|
||||
* Its primary use is to enable attaching comments or other metadata to a value,
|
||||
* or to otherwise exert more fine-grained control over the stringified output.
|
||||
*
|
||||
* Doesn't wrap plain values in Scalar objects.
|
||||
*/
|
||||
export function createNode(
|
||||
value: any,
|
||||
wrapScalars: false,
|
||||
tag?: string
|
||||
): YAMLMap | YAMLSeq | string | number | boolean | null
|
||||
|
||||
/**
|
||||
* Parse an input string into a single YAML.Document.
|
||||
*/
|
||||
export function parseDocument(str: string, options?: Options): Document.Parsed
|
||||
|
||||
/**
|
||||
* Parse the input as a stream of YAML documents.
|
||||
*
|
||||
* Documents should be separated from each other by `...` or `---` marker lines.
|
||||
*/
|
||||
export function parseAllDocuments(
|
||||
str: string,
|
||||
options?: Options
|
||||
): Document.Parsed[]
|
||||
|
||||
/**
|
||||
* Parse an input string into JavaScript.
|
||||
*
|
||||
* Only supports input consisting of a single YAML document; for multi-document
|
||||
* support you should use `YAML.parseAllDocuments`. May throw on error, and may
|
||||
* log warnings using `console.warn`.
|
||||
*
|
||||
* @param str A string with YAML formatting.
|
||||
* @returns The value will match the type of the root value of the parsed YAML
|
||||
* document, so Maps become objects, Sequences arrays, and scalars result in
|
||||
* nulls, booleans, numbers and strings.
|
||||
*/
|
||||
export function parse(str: string, options?: Options): any
|
||||
|
||||
/**
|
||||
* @returns Will always include \n as the last character, as is expected of YAML documents.
|
||||
*/
|
||||
export function stringify(value: any, options?: Options): string
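// Usage sketch tying the Options interface above to the exported functions;
// plain CommonJS, assuming yaml@1.10 (these typings mirror that runtime API).
const YAML = require('yaml')

// mapAsMap: represent mappings as Map rather than plain objects.
const m = YAML.parse('1: one\n2: two\n', { mapAsMap: true })
console.log(m instanceof Map, m.get(1)) // true 'one'

// Multi-document input needs parseAllDocuments(); parse() reports an error.
const docs = YAML.parseAllDocuments('a: 1\n---\nb: 2\n')
console.log(docs.map(d => d.toJSON())) // [ { a: 1 }, { b: 2 } ]

// Per-call document options such as indent apply to stringification too.
process.stdout.write(YAML.stringify({ list: [1, 2, 3] }, { indent: 4 }))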
|
1
node_modules/yaml/index.js
generated
vendored
Normal file
@@ -0,0 +1 @@
module.exports = require('./dist').YAML
2
node_modules/yaml/map.js
generated
vendored
Normal file
@@ -0,0 +1,2 @@
module.exports = require('./dist/types').YAMLMap
require('./dist/legacy-exports').warnFileDeprecation(__filename)
106
node_modules/yaml/package.json
generated
vendored
Normal file
|
@ -0,0 +1,106 @@
|
|||
{
|
||||
"name": "yaml",
|
||||
"version": "1.10.0",
|
||||
"license": "ISC",
|
||||
"author": "Eemeli Aro <eemeli@gmail.com>",
|
||||
"repository": "github:eemeli/yaml",
|
||||
"description": "JavaScript parser and stringifier for YAML",
|
||||
"keywords": [
|
||||
"YAML",
|
||||
"parser",
|
||||
"stringifier"
|
||||
],
|
||||
"homepage": "https://eemeli.org/yaml/",
|
||||
"files": [
|
||||
"browser/",
|
||||
"dist/",
|
||||
"types/",
|
||||
"*.d.ts",
|
||||
"*.js",
|
||||
"*.mjs",
|
||||
"!*config.js"
|
||||
],
|
||||
"type": "commonjs",
|
||||
"main": "./index.js",
|
||||
"browser": {
|
||||
"./index.js": "./browser/index.js",
|
||||
"./map.js": "./browser/map.js",
|
||||
"./pair.js": "./browser/pair.js",
|
||||
"./parse-cst.js": "./browser/parse-cst.js",
|
||||
"./scalar.js": "./browser/scalar.js",
|
||||
"./schema.js": "./browser/schema.js",
|
||||
"./seq.js": "./browser/seq.js",
|
||||
"./types.js": "./browser/types.js",
|
||||
"./types.mjs": "./browser/types.js",
|
||||
"./types/binary.js": "./browser/types/binary.js",
|
||||
"./types/omap.js": "./browser/types/omap.js",
|
||||
"./types/pairs.js": "./browser/types/pairs.js",
|
||||
"./types/set.js": "./browser/types/set.js",
|
||||
"./types/timestamp.js": "./browser/types/timestamp.js",
|
||||
"./util.js": "./browser/util.js",
|
||||
"./util.mjs": "./browser/util.js"
|
||||
},
|
||||
"exports": {
|
||||
".": "./index.js",
|
||||
"./parse-cst": "./parse-cst.js",
|
||||
"./types": [
|
||||
{
|
||||
"import": "./types.mjs"
|
||||
},
|
||||
"./types.js"
|
||||
],
|
||||
"./util": [
|
||||
{
|
||||
"import": "./util.mjs"
|
||||
},
|
||||
"./util.js"
|
||||
],
|
||||
"./": "./"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "npm run build:node && npm run build:browser",
|
||||
"build:browser": "rollup -c rollup.browser-config.js",
|
||||
"build:node": "rollup -c rollup.node-config.js",
|
||||
"clean": "git clean -fdxe node_modules",
|
||||
"lint": "eslint src/",
|
||||
"prettier": "prettier --write .",
|
||||
"start": "cross-env TRACE_LEVEL=log npm run build:node && node -i -e 'YAML=require(\".\")'",
|
||||
"test": "jest",
|
||||
"test:dist": "npm run build:node && jest",
|
||||
"test:types": "tsc --lib ES2017 --noEmit tests/typings.ts",
|
||||
"docs:install": "cd docs-slate && bundle install",
|
||||
"docs:deploy": "cd docs-slate && ./deploy.sh",
|
||||
"docs": "cd docs-slate && bundle exec middleman server",
|
||||
"preversion": "npm test && npm run build",
|
||||
"prepublishOnly": "npm run clean && npm test && npm run build"
|
||||
},
|
||||
"browserslist": "> 0.5%, not dead",
|
||||
"prettier": {
|
||||
"arrowParens": "avoid",
|
||||
"semi": false,
|
||||
"singleQuote": true,
|
||||
"trailingComma": "none"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/core": "^7.9.6",
|
||||
"@babel/plugin-proposal-class-properties": "^7.8.3",
|
||||
"@babel/preset-env": "^7.9.6",
|
||||
"@rollup/plugin-babel": "^5.0.0",
|
||||
"babel-eslint": "^10.1.0",
|
||||
"babel-jest": "^26.0.1",
|
||||
"babel-plugin-trace": "^1.1.0",
|
||||
"common-tags": "^1.8.0",
|
||||
"cross-env": "^7.0.2",
|
||||
"eslint": "^7.0.0",
|
||||
"eslint-config-prettier": "^6.11.0",
|
||||
"eslint-plugin-prettier": "^3.1.3",
|
||||
"fast-check": "^1.24.2",
|
||||
"jest": "^26.0.1",
|
||||
"prettier": "^2.0.5",
|
||||
"rollup": "^2.10.2",
|
||||
"typescript": "^3.9.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
}
|
||||
}
|
2
node_modules/yaml/pair.js
generated
vendored
Normal file
@@ -0,0 +1,2 @@
module.exports = require('./dist/types').Pair
require('./dist/legacy-exports').warnFileDeprecation(__filename)
187
node_modules/yaml/parse-cst.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,187 @@
|
|||
import { Type, YAMLSyntaxError } from './util'
|
||||
|
||||
export default function parseCST(str: string): ParsedCST
|
||||
|
||||
export interface ParsedCST extends Array<CST.Document> {
|
||||
setOrigRanges(): boolean
|
||||
}
|
||||
|
||||
export namespace CST {
|
||||
interface Range {
|
||||
start: number
|
||||
end: number
|
||||
origStart?: number
|
||||
origEnd?: number
|
||||
isEmpty(): boolean
|
||||
}
|
||||
|
||||
interface ParseContext {
|
||||
/** Node starts at beginning of line */
|
||||
atLineStart: boolean
|
||||
/** true if currently in a collection context */
|
||||
inCollection: boolean
|
||||
/** true if currently in a flow context */
|
||||
inFlow: boolean
|
||||
/** Current level of indentation */
|
||||
indent: number
|
||||
/** Start of the current line */
|
||||
lineStart: number
|
||||
/** The parent of the node */
|
||||
parent: Node
|
||||
/** Source of the YAML document */
|
||||
src: string
|
||||
}
|
||||
|
||||
interface Node {
|
||||
context: ParseContext | null
|
||||
/** if not null, indicates a parser failure */
|
||||
error: YAMLSyntaxError | null
|
||||
/** span of context.src parsed into this node */
|
||||
range: Range | null
|
||||
valueRange: Range | null
|
||||
/** anchors, tags and comments */
|
||||
props: Range[]
|
||||
/** specific node type */
|
||||
type: Type
|
||||
/** if non-null, overrides source value */
|
||||
value: string | null
|
||||
|
||||
readonly anchor: string | null
|
||||
readonly comment: string | null
|
||||
readonly hasComment: boolean
|
||||
readonly hasProps: boolean
|
||||
readonly jsonLike: boolean
|
||||
readonly rawValue: string | null
|
||||
readonly tag:
|
||||
| null
|
||||
| { verbatim: string }
|
||||
| { handle: string; suffix: string }
|
||||
readonly valueRangeContainsNewline: boolean
|
||||
}
|
||||
|
||||
interface Alias extends Node {
|
||||
type: Type.ALIAS
|
||||
/** contain the anchor without the * prefix */
|
||||
readonly rawValue: string
|
||||
}
|
||||
|
||||
type Scalar = BlockValue | PlainValue | QuoteValue
|
||||
|
||||
interface BlockValue extends Node {
|
||||
type: Type.BLOCK_FOLDED | Type.BLOCK_LITERAL
|
||||
chomping: 'CLIP' | 'KEEP' | 'STRIP'
|
||||
blockIndent: number | null
|
||||
header: Range
|
||||
readonly strValue: string | null
|
||||
}
|
||||
|
||||
interface BlockFolded extends BlockValue {
|
||||
type: Type.BLOCK_FOLDED
|
||||
}
|
||||
|
||||
interface BlockLiteral extends BlockValue {
|
||||
type: Type.BLOCK_LITERAL
|
||||
}
|
||||
|
||||
interface PlainValue extends Node {
|
||||
type: Type.PLAIN
|
||||
readonly strValue: string | null
|
||||
}
|
||||
|
||||
interface QuoteValue extends Node {
|
||||
type: Type.QUOTE_DOUBLE | Type.QUOTE_SINGLE
|
||||
readonly strValue:
|
||||
| null
|
||||
| string
|
||||
| { str: string; errors: YAMLSyntaxError[] }
|
||||
}
|
||||
|
||||
interface QuoteDouble extends QuoteValue {
|
||||
type: Type.QUOTE_DOUBLE
|
||||
}
|
||||
|
||||
interface QuoteSingle extends QuoteValue {
|
||||
type: Type.QUOTE_SINGLE
|
||||
}
|
||||
|
||||
interface Comment extends Node {
|
||||
type: Type.COMMENT
|
||||
readonly anchor: null
|
||||
readonly comment: string
|
||||
readonly rawValue: null
|
||||
readonly tag: null
|
||||
}
|
||||
|
||||
interface BlankLine extends Node {
|
||||
type: Type.BLANK_LINE
|
||||
}
|
||||
|
||||
interface MapItem extends Node {
|
||||
type: Type.MAP_KEY | Type.MAP_VALUE
|
||||
node: ContentNode | null
|
||||
}
|
||||
|
||||
interface MapKey extends MapItem {
|
||||
type: Type.MAP_KEY
|
||||
}
|
||||
|
||||
interface MapValue extends MapItem {
|
||||
type: Type.MAP_VALUE
|
||||
}
|
||||
|
||||
interface Map extends Node {
|
||||
type: Type.MAP
|
||||
/** implicit keys are not wrapped */
|
||||
items: Array<BlankLine | Comment | Alias | Scalar | MapItem>
|
||||
}
|
||||
|
||||
interface SeqItem extends Node {
|
||||
type: Type.SEQ_ITEM
|
||||
node: ContentNode | null
|
||||
}
|
||||
|
||||
interface Seq extends Node {
|
||||
type: Type.SEQ
|
||||
items: Array<BlankLine | Comment | SeqItem>
|
||||
}
|
||||
|
||||
interface FlowChar {
|
||||
char: '{' | '}' | '[' | ']' | ',' | '?' | ':'
|
||||
offset: number
|
||||
origOffset?: number
|
||||
}
|
||||
|
||||
interface FlowCollection extends Node {
|
||||
type: Type.FLOW_MAP | Type.FLOW_SEQ
|
||||
items: Array<
|
||||
FlowChar | BlankLine | Comment | Alias | Scalar | FlowCollection
|
||||
>
|
||||
}
|
||||
|
||||
interface FlowMap extends FlowCollection {
|
||||
type: Type.FLOW_MAP
|
||||
}
|
||||
|
||||
interface FlowSeq extends FlowCollection {
|
||||
type: Type.FLOW_SEQ
|
||||
}
|
||||
|
||||
type ContentNode = Alias | Scalar | Map | Seq | FlowCollection
|
||||
|
||||
interface Directive extends Node {
|
||||
type: Type.DIRECTIVE
|
||||
name: string
|
||||
readonly anchor: null
|
||||
readonly parameters: string[]
|
||||
readonly tag: null
|
||||
}
|
||||
|
||||
interface Document extends Node {
|
||||
type: Type.DOCUMENT
|
||||
directives: Array<BlankLine | Comment | Directive>
|
||||
contents: Array<BlankLine | Comment | ContentNode>
|
||||
readonly anchor: null
|
||||
readonly comment: null
|
||||
readonly tag: null
|
||||
}
|
||||
}
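// Sketch of the CST shapes declared above, assuming the 'yaml/parse-cst'
// entry point (its one-line wrapper appears below in this diff).
const parseCST = require('yaml/parse-cst')

const [cstDoc] = parseCST('key: value # comment\n')
console.log(cstDoc.type) // 'DOCUMENT'

// contents holds the content nodes; for a block mapping that is a MAP whose
// items mix unwrapped implicit keys (PLAIN) with MAP_VALUE wrappers.
const map = cstDoc.contents.find(node => node.type === 'MAP')
console.log(map.items.map(node => node.type)) // e.g. [ 'PLAIN', 'MAP_VALUE' ]

// Ranges are character offsets into the original source string.
console.log(map.valueRange && [map.valueRange.start, map.valueRange.end])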
|
1
node_modules/yaml/parse-cst.js
generated
vendored
Normal file
@@ -0,0 +1 @@
module.exports = require('./dist/parse-cst').parse
2
node_modules/yaml/scalar.js
generated
vendored
Normal file
@@ -0,0 +1,2 @@
module.exports = require('./dist/types').Scalar
require('./dist/legacy-exports').warnFileDeprecation(__filename)
9
node_modules/yaml/schema.js
generated
vendored
Normal file
@@ -0,0 +1,9 @@
const types = require('./dist/types')
const util = require('./dist/util')

module.exports = types.Schema
module.exports.nullOptions = types.nullOptions
module.exports.strOptions = types.strOptions
module.exports.stringify = util.stringifyString

require('./dist/legacy-exports').warnFileDeprecation(__filename)
2
node_modules/yaml/seq.js
generated
vendored
Normal file
@@ -0,0 +1,2 @@
module.exports = require('./dist/types').YAMLSeq
require('./dist/legacy-exports').warnFileDeprecation(__filename)
407
node_modules/yaml/types.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,407 @@
|
|||
import { Document, scalarOptions } from './index'
|
||||
import { CST } from './parse-cst'
|
||||
import { Type } from './util'
|
||||
|
||||
export const binaryOptions: scalarOptions.Binary
|
||||
export const boolOptions: scalarOptions.Bool
|
||||
export const intOptions: scalarOptions.Int
|
||||
export const nullOptions: scalarOptions.Null
|
||||
export const strOptions: scalarOptions.Str
|
||||
|
||||
export class Schema {
|
||||
/** Default: `'tag:yaml.org,2002:'` */
|
||||
static defaultPrefix: string
|
||||
static defaultTags: {
|
||||
/** Default: `'tag:yaml.org,2002:map'` */
|
||||
MAP: string
|
||||
/** Default: `'tag:yaml.org,2002:seq'` */
|
||||
SEQ: string
|
||||
/** Default: `'tag:yaml.org,2002:str'` */
|
||||
STR: string
|
||||
}
|
||||
constructor(options: Schema.Options)
|
||||
/**
|
||||
* Convert any value into a `Node` using this schema, recursively turning
|
||||
* objects into collections.
|
||||
*
|
||||
* @param wrapScalars If `true`, also wraps plain values in `Scalar` objects;
|
||||
* if undefined or `false` and `value` is not an object, it will be returned
|
||||
* directly.
|
||||
* @param tag Use to specify the collection type, e.g. `"!!omap"`. Note that
|
||||
* this requires the corresponding tag to be available in this schema.
|
||||
*/
|
||||
createNode(
|
||||
value: any,
|
||||
wrapScalars?: boolean,
|
||||
tag?: string,
|
||||
ctx?: Schema.CreateNodeContext
|
||||
): Node
|
||||
/**
|
||||
* Convert a key and a value into a `Pair` using this schema, recursively
|
||||
* wrapping all values as `Scalar` or `Collection` nodes.
|
||||
*
|
||||
* @param ctx To not wrap scalars, use a context `{ wrapScalars: false }`
|
||||
*/
|
||||
createPair(key: any, value: any, ctx?: Schema.CreateNodeContext): Pair
|
||||
merge: boolean
|
||||
name: Schema.Name
|
||||
sortMapEntries: ((a: Pair, b: Pair) => number) | null
|
||||
tags: Schema.Tag[]
|
||||
}
|
||||
|
||||
export namespace Schema {
  type Name = 'core' | 'failsafe' | 'json' | 'yaml-1.1'

  interface Options {
    /**
     * Array of additional tags to include in the schema, or a function that may
     * modify the schema's base tag array.
     */
    customTags?: (TagId | Tag)[] | ((tags: Tag[]) => Tag[])
    /**
     * Enable support for `<<` merge keys.
     *
     * Default: `false` for YAML 1.2, `true` for earlier versions
     */
    merge?: boolean
    /**
     * The base schema to use.
     *
     * Default: `"core"` for YAML 1.2, `"yaml-1.1"` for earlier versions
     */
    schema?: Name
    /**
     * When stringifying, sort map entries. If `true`, sort by comparing key values with `<`.
     *
     * Default: `false`
     */
    sortMapEntries?: boolean | ((a: Pair, b: Pair) => number)
    /**
     * @deprecated Use `customTags` instead.
     */
    tags?: Options['customTags']
  }

  interface CreateNodeContext {
    wrapScalars?: boolean
    [key: string]: any
  }

  interface StringifyContext {
    forceBlockIndent?: boolean
    implicitKey?: boolean
    indent?: string
    indentAtStart?: number
    inFlow?: boolean
    [key: string]: any
  }

  type TagId =
    | 'binary'
    | 'bool'
    | 'float'
    | 'floatExp'
    | 'floatNaN'
    | 'floatTime'
    | 'int'
    | 'intHex'
    | 'intOct'
    | 'intTime'
    | 'null'
    | 'omap'
    | 'pairs'
    | 'set'
    | 'timestamp'

  type Tag = CustomTag | DefaultTag

  interface BaseTag {
    /**
     * An optional factory function, used e.g. by collections when wrapping JS objects as AST nodes.
     */
    createNode?: (
      schema: Schema,
      value: any,
      ctx: Schema.CreateNodeContext
    ) => YAMLMap | YAMLSeq | Scalar
    /**
     * If a tag has multiple forms that should be parsed and/or stringified differently, use `format` to identify them.
     */
    format?: string
    /**
     * Used by `YAML.createNode` to detect your data type, e.g. using `typeof` or
     * `instanceof`.
     */
    identify(value: any): boolean
    /**
     * The `Node` child class that implements this tag. Required for collections and tags that have overlapping JS representations.
     */
    nodeClass?: new () => any
    /**
     * Used by some tags to configure their stringification, where applicable.
     */
    options?: object
    /**
     * Optional function stringifying the AST node in the current context. If your
     * data includes a suitable `.toString()` method, you can probably leave this
     * undefined and use the default stringifier.
     *
     * @param item The node being stringified.
     * @param ctx Contains the stringifying context variables.
     * @param onComment Callback to signal that the stringifier includes the
     *   item's comment in its output.
     * @param onChompKeep Callback to signal that the output uses a block scalar
     *   type with the `+` chomping indicator.
     */
    stringify?: (
      item: Node,
      ctx: Schema.StringifyContext,
      onComment?: () => void,
      onChompKeep?: () => void
    ) => string
    /**
     * The identifier for your data type, with which its stringified form will be
     * prefixed. Should either be a !-prefixed local `!tag`, or a fully qualified
     * `tag:domain,date:foo`.
     */
    tag: string
  }

  interface CustomTag extends BaseTag {
    /**
     * A JavaScript class that should be matched to this tag, e.g. `Date` for `!!timestamp`.
     * @deprecated Use `Tag.identify` instead
     */
    class?: new () => any
    /**
     * Turns a CST node into an AST node. If returning a non-`Node` value, the
     * output will be wrapped as a `Scalar`.
     */
    resolve(doc: Document, cstNode: CST.Node): Node | any
  }

  interface DefaultTag extends BaseTag {
    /**
     * If `true`, together with `test` allows for values to be stringified without
     * an explicit tag. For most cases, it's unlikely that you'll actually want to
     * use this, even if you first think you do.
     */
    default: true
    /**
     * Alternative form used by default tags; called with `test` match results.
     */
    resolve(...match: string[]): Node | any
    /**
     * Together with `default` allows for values to be stringified without an
     * explicit tag and detected using a regular expression. For most cases, it's
     * unlikely that you'll actually want to use these, even if you first think
     * you do.
     */
    test: RegExp
  }
}

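To make the tag interfaces above concrete, here is a hedged sketch (not part of the vendored file) of a custom tag for JS RegExp values, registered through the `customTags` option; the `!re` tag name and the `/body/flags` text format are assumptions for this illustration:

const YAML = require('yaml')
const { stringifyString } = require('yaml/util')

// sketch of a Schema.CustomTag: identify picks up RegExp instances,
// resolve turns a parsed "/body/flags" scalar back into a RegExp, and
// stringify reuses the default string stringifier on the node's value
const regexp = {
  identify: value => value instanceof RegExp,
  tag: '!re',
  resolve(doc, cst) {
    const match = cst.strValue.match(/^\/([\s\S]*)\/([gimuy]*)$/)
    return match ? new RegExp(match[1], match[2]) : cst.strValue
  },
  stringify(item, ctx, onComment, onChompKeep) {
    const value = String(item.value)
    return stringifyString({ value }, ctx, onComment, onChompKeep)
  }
}

YAML.parse('pattern: !re /fo+/i', { customTags: [regexp] })   // { pattern: /fo+/i } expected
YAML.stringify({ pattern: /fo+/i }, { customTags: [regexp] }) // 'pattern: !re /fo+/i\n' expected
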
export class Node {
  /** A comment on or immediately after this */
  comment?: string | null
  /** A comment before this */
  commentBefore?: string | null
  /** Only available when `keepCstNodes` is set to `true` */
  cstNode?: CST.Node
  /**
   * The [start, end] range of characters of the source parsed
   * into this node (undefined for pairs or if not parsed)
   */
  range?: [number, number] | null
  /** A blank line before this node and its commentBefore */
  spaceBefore?: boolean
  /** A fully qualified tag, if required */
  tag?: string
  /** A plain JS representation of this node */
  toJSON(arg?: any): any
  /** The type of this node */
  type?: Type | Pair.Type
}

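Since the comment and blank-line properties above are what let the library round-trip hand-edited YAML, a brief hedged sketch (not part of the vendored file) of setting them before stringifying; the exact output layout is indicative only:

const YAML = require('yaml')

// sketch: attach comments to parsed nodes, then write the document back out
const doc = YAML.parseDocument('hello: world\n')
doc.contents.commentBefore = ' greeting map'
doc.comment = ' end of file'
String(doc)
// expected to contain '# greeting map' above the map
// and '# end of file' after it
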
export class Scalar extends Node {
  constructor(value: any)
  type?: Scalar.Type
  /**
   * By default (undefined), numbers use decimal notation.
   * The YAML 1.2 core schema only supports 'HEX' and 'OCT'.
   */
  format?: 'BIN' | 'HEX' | 'OCT' | 'TIME'
  value: any
  toJSON(arg?: any, ctx?: AST.NodeToJsonContext): any
  toString(): string
}
export namespace Scalar {
  type Type =
    | Type.BLOCK_FOLDED
    | Type.BLOCK_LITERAL
    | Type.PLAIN
    | Type.QUOTE_DOUBLE
    | Type.QUOTE_SINGLE
}

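A hedged sketch (not part of the vendored file) of how these Scalar properties surface when a node is fetched with `keepScalar`; the exact values assume the default yaml@1.10 core schema:

const YAML = require('yaml')

// sketch: hexadecimal integers remember their format, so they round-trip as written
const doc = YAML.parseDocument('answer: 0x2a\n')
const scalar = doc.contents.get('answer', true) // keepScalar: true, returns the Scalar node
scalar.value  // 42 (expected)
scalar.format // 'HEX' (expected)
String(doc)   // 'answer: 0x2a\n' (expected round-trip)
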
export class Alias extends Node {
  type: Type.ALIAS
  source: Node
  cstNode?: CST.Alias
  toString(ctx: Schema.StringifyContext): string
}

export class Pair extends Node {
  constructor(key: any, value?: any)
  type: Pair.Type.PAIR | Pair.Type.MERGE_PAIR
  /** Always Node or null when parsed, but can be set to anything. */
  key: any
  /** Always Node or null when parsed, but can be set to anything. */
  value: any
  cstNode?: never // no corresponding cstNode
  toJSON(arg?: any, ctx?: AST.NodeToJsonContext): object | Map<any, any>
  toString(
    ctx?: Schema.StringifyContext,
    onComment?: () => void,
    onChompKeep?: () => void
  ): string
}
export namespace Pair {
  enum Type {
    PAIR = 'PAIR',
    MERGE_PAIR = 'MERGE_PAIR'
  }
}

export class Merge extends Pair {
  type: Pair.Type.MERGE_PAIR
  /** Always Scalar('<<'), defined by the type specification */
  key: AST.PlainValue
  /** Always YAMLSeq<Alias(Map)>, stringified as *A if length = 1 */
  value: YAMLSeq
  toString(ctx?: Schema.StringifyContext, onComment?: () => void): string
}

export class Collection extends Node {
  type?: Type.MAP | Type.FLOW_MAP | Type.SEQ | Type.FLOW_SEQ | Type.DOCUMENT
  items: any[]
  schema?: Schema

  /**
   * Adds a value to the collection. For `!!map` and `!!omap` the value must
   * be a Pair instance or a `{ key, value }` object, which may not have a key
   * that already exists in the map.
   */
  add(value: any): void
  addIn(path: Iterable<any>, value: any): void
  /**
   * Removes a value from the collection.
   * @returns `true` if the item was found and removed.
   */
  delete(key: any): boolean
  deleteIn(path: Iterable<any>): boolean
  /**
   * Returns item at `key`, or `undefined` if not found. By default unwraps
   * scalar values from their surrounding node; to disable set `keepScalar` to
   * `true` (collections are always returned intact).
   */
  get(key: any, keepScalar?: boolean): any
  getIn(path: Iterable<any>, keepScalar?: boolean): any
  /**
   * Checks if the collection includes a value with the key `key`.
   */
  has(key: any): boolean
  hasIn(path: Iterable<any>): boolean
  /**
   * Sets a value in this collection. For `!!set`, `value` needs to be a
   * boolean to add/remove the item from the set.
   */
  set(key: any, value: any): void
  setIn(path: Iterable<any>, value: any): void
}

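The accessors above and their `*In` path variants are easiest to see on a parsed document; a hedged sketch (not part of the vendored file), with illustrative keys:

const YAML = require('yaml')

// sketch: Collection get/has/add and the path-based getIn/addIn variants
const doc = YAML.parseDocument('a: 1\nb:\n  - x\n  - y\n')
const map = doc.contents    // a YAMLMap
map.get('a')                // 1 (scalar value unwrapped by default, expected)
map.get('a', true)          // the Scalar node itself (keepScalar)
map.getIn(['b', 0])         // 'x' (expected)
map.has('c')                // false
map.addIn(['b'], 'z')       // appends to the nested sequence
String(doc)                 // expected: 'a: 1\nb:\n  - x\n  - y\n  - z\n'
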
export class YAMLMap extends Collection {
  type?: Type.FLOW_MAP | Type.MAP
  items: Array<Pair>
  hasAllNullValues(): boolean
  toJSON(arg?: any, ctx?: AST.NodeToJsonContext): object | Map<any, any>
  toString(
    ctx?: Schema.StringifyContext,
    onComment?: () => void,
    onChompKeep?: () => void
  ): string
}

export class YAMLSeq extends Collection {
  type?: Type.FLOW_SEQ | Type.SEQ
  delete(key: number | string | Scalar): boolean
  get(key: number | string | Scalar, keepScalar?: boolean): any
  has(key: number | string | Scalar): boolean
  set(key: number | string | Scalar, value: any): void
  hasAllNullValues(): boolean
  toJSON(arg?: any, ctx?: AST.NodeToJsonContext): any[]
  toString(
    ctx?: Schema.StringifyContext,
    onComment?: () => void,
    onChompKeep?: () => void
  ): string
}

export namespace AST {
  interface NodeToJsonContext {
    anchors?: any[]
    doc: Document
    keep?: boolean
    mapAsMap?: boolean
    maxAliasCount?: number
    onCreate?: (node: Node) => void
    [key: string]: any
  }

  interface BlockFolded extends Scalar {
    type: Type.BLOCK_FOLDED
    cstNode?: CST.BlockFolded
  }

  interface BlockLiteral extends Scalar {
    type: Type.BLOCK_LITERAL
    cstNode?: CST.BlockLiteral
  }

  interface PlainValue extends Scalar {
    type: Type.PLAIN
    cstNode?: CST.PlainValue
  }

  interface QuoteDouble extends Scalar {
    type: Type.QUOTE_DOUBLE
    cstNode?: CST.QuoteDouble
  }

  interface QuoteSingle extends Scalar {
    type: Type.QUOTE_SINGLE
    cstNode?: CST.QuoteSingle
  }

  interface FlowMap extends YAMLMap {
    type: Type.FLOW_MAP
    cstNode?: CST.FlowMap
  }

  interface BlockMap extends YAMLMap {
    type: Type.MAP
    cstNode?: CST.Map
  }

  interface FlowSeq extends YAMLSeq {
    type: Type.FLOW_SEQ
    items: Array<Node>
    cstNode?: CST.FlowSeq
  }

  interface BlockSeq extends YAMLSeq {
    type: Type.SEQ
    items: Array<Node | null>
    cstNode?: CST.Seq
  }
}
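For context on the Merge pair above, a hedged sketch (not part of the vendored file) of the `<<` merge-key behaviour that the `merge` option enables; the key names are illustrative:

const YAML = require('yaml')

// sketch: merge keys pull entries in from an aliased map,
// with the child's own keys taking precedence
const src = 'base: &base { a: 1, b: 2 }\nchild:\n  <<: *base\n  b: 3\n'
YAML.parse(src, { merge: true })
// expected: { base: { a: 1, b: 2 }, child: { a: 1, b: 3 } }
// for YAML 1.1 documents (schema 'yaml-1.1'), merge defaults to true
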
17
node_modules/yaml/types.js
generated
vendored
Normal file
@@ -0,0 +1,17 @@
const types = require('./dist/types')

exports.binaryOptions = types.binaryOptions
exports.boolOptions = types.boolOptions
exports.intOptions = types.intOptions
exports.nullOptions = types.nullOptions
exports.strOptions = types.strOptions

exports.Schema = types.Schema
exports.Alias = types.Alias
exports.Collection = types.Collection
exports.Merge = types.Merge
exports.Node = types.Node
exports.Pair = types.Pair
exports.Scalar = types.Scalar
exports.YAMLMap = types.YAMLMap
exports.YAMLSeq = types.YAMLSeq
17
node_modules/yaml/types.mjs
generated
vendored
Normal file
@@ -0,0 +1,17 @@
import types from './dist/types.js'

export const binaryOptions = types.binaryOptions
export const boolOptions = types.boolOptions
export const intOptions = types.intOptions
export const nullOptions = types.nullOptions
export const strOptions = types.strOptions

export const Schema = types.Schema
export const Alias = types.Alias
export const Collection = types.Collection
export const Merge = types.Merge
export const Node = types.Node
export const Pair = types.Pair
export const Scalar = types.Scalar
export const YAMLMap = types.YAMLMap
export const YAMLSeq = types.YAMLSeq
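Both entry points above re-export the scalar stringification options; a hedged sketch (not part of the vendored file) of tweaking them, where the exact option fields (`fold.lineWidth`, `nullStr`) are assumptions based on the yaml@1.10 documentation:

const YAML = require('yaml')
const { strOptions, nullOptions } = require('yaml/types')

// sketch: module-level options that shape how scalars are written out
strOptions.fold.lineWidth = 100 // fold long strings at 100 columns (assumed default: 80)
nullOptions.nullStr = '~'       // write nulls as "~" instead of "null"

YAML.stringify({ note: null })  // expected: 'note: ~\n'
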
8
node_modules/yaml/types/binary.js
generated
vendored
Normal file
@@ -0,0 +1,8 @@
'use strict'
Object.defineProperty(exports, '__esModule', { value: true })

const legacy = require('../dist/legacy-exports')
exports.binary = legacy.binary
exports.default = [exports.binary]

legacy.warnFileDeprecation(__filename)
3
node_modules/yaml/types/omap.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
const legacy = require('../dist/legacy-exports')
module.exports = legacy.omap
legacy.warnFileDeprecation(__filename)
3
node_modules/yaml/types/pairs.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
const legacy = require('../dist/legacy-exports')
module.exports = legacy.pairs
legacy.warnFileDeprecation(__filename)
3
node_modules/yaml/types/set.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
const legacy = require('../dist/legacy-exports')
module.exports = legacy.set
legacy.warnFileDeprecation(__filename)
10
node_modules/yaml/types/timestamp.js
generated
vendored
Normal file
@@ -0,0 +1,10 @@
'use strict'
Object.defineProperty(exports, '__esModule', { value: true })

const legacy = require('../dist/legacy-exports')
exports.default = [legacy.intTime, legacy.floatTime, legacy.timestamp]
exports.floatTime = legacy.floatTime
exports.intTime = legacy.intTime
exports.timestamp = legacy.timestamp

legacy.warnFileDeprecation(__filename)
86
node_modules/yaml/util.d.ts
generated
vendored
Normal file
@@ -0,0 +1,86 @@
import { Document } from './index'
import { CST } from './parse-cst'
import { AST, Pair, Scalar, Schema } from './types'

export function findPair(items: any[], key: Scalar | any): Pair | undefined

export function parseMap(doc: Document, cst: CST.Map): AST.BlockMap
export function parseMap(doc: Document, cst: CST.FlowMap): AST.FlowMap
export function parseSeq(doc: Document, cst: CST.Seq): AST.BlockSeq
export function parseSeq(doc: Document, cst: CST.FlowSeq): AST.FlowSeq

export function stringifyNumber(item: Scalar): string
export function stringifyString(
  item: Scalar,
  ctx: Schema.StringifyContext,
  onComment?: () => void,
  onChompKeep?: () => void
): string

export function toJSON(
  value: any,
  arg?: any,
  ctx?: Schema.CreateNodeContext
): any

export enum Type {
  ALIAS = 'ALIAS',
  BLANK_LINE = 'BLANK_LINE',
  BLOCK_FOLDED = 'BLOCK_FOLDED',
  BLOCK_LITERAL = 'BLOCK_LITERAL',
  COMMENT = 'COMMENT',
  DIRECTIVE = 'DIRECTIVE',
  DOCUMENT = 'DOCUMENT',
  FLOW_MAP = 'FLOW_MAP',
  FLOW_SEQ = 'FLOW_SEQ',
  MAP = 'MAP',
  MAP_KEY = 'MAP_KEY',
  MAP_VALUE = 'MAP_VALUE',
  PLAIN = 'PLAIN',
  QUOTE_DOUBLE = 'QUOTE_DOUBLE',
  QUOTE_SINGLE = 'QUOTE_SINGLE',
  SEQ = 'SEQ',
  SEQ_ITEM = 'SEQ_ITEM'
}

interface LinePos {
  line: number
  col: number
}

export class YAMLError extends Error {
  name:
    | 'YAMLReferenceError'
    | 'YAMLSemanticError'
    | 'YAMLSyntaxError'
    | 'YAMLWarning'
  message: string
  source?: CST.Node

  nodeType?: Type
  range?: CST.Range
  linePos?: { start: LinePos; end: LinePos }

  /**
   * Drops `source` and adds `nodeType`, `range` and `linePos`, as well as
   * adding details to `message`. Run automatically for document errors if
   * the `prettyErrors` option is set.
   */
  makePretty(): void
}

export class YAMLReferenceError extends YAMLError {
  name: 'YAMLReferenceError'
}

export class YAMLSemanticError extends YAMLError {
  name: 'YAMLSemanticError'
}

export class YAMLSyntaxError extends YAMLError {
  name: 'YAMLSyntaxError'
}

export class YAMLWarning extends YAMLError {
  name: 'YAMLWarning'
}
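A hedged sketch (not part of the vendored file) of how these error classes surface when parsing malformed input; the sample input and the `prettyErrors` behaviour follow the yaml@1.10 documentation:

const YAML = require('yaml')
const { YAMLError } = require('yaml/util')

// sketch: parse errors are collected on the document rather than thrown
const doc = YAML.parseDocument('a: [1, 2\n', { prettyErrors: true })
for (const err of doc.errors) {
  // each entry is a YAMLError subclass (syntax, semantic, reference or warning)
  console.log(err instanceof YAMLError, err.name, err.linePos)
}
// YAML.parse() would instead throw on the first of these errors (expected)
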
16
node_modules/yaml/util.js
generated
vendored
Normal file
@@ -0,0 +1,16 @@
const util = require('./dist/util')

exports.findPair = util.findPair
exports.toJSON = util.toJSON
exports.parseMap = util.parseMap
exports.parseSeq = util.parseSeq

exports.stringifyNumber = util.stringifyNumber
exports.stringifyString = util.stringifyString
exports.Type = util.Type

exports.YAMLError = util.YAMLError
exports.YAMLReferenceError = util.YAMLReferenceError
exports.YAMLSemanticError = util.YAMLSemanticError
exports.YAMLSyntaxError = util.YAMLSyntaxError
exports.YAMLWarning = util.YAMLWarning
18
node_modules/yaml/util.mjs
generated
vendored
Normal file
@@ -0,0 +1,18 @@
import util from './dist/util.js'

export const findPair = util.findPair
export const toJSON = util.toJSON

export const parseMap = util.parseMap
export const parseSeq = util.parseSeq

export const stringifyNumber = util.stringifyNumber
export const stringifyString = util.stringifyString

export const Type = util.Type

export const YAMLError = util.YAMLError
export const YAMLReferenceError = util.YAMLReferenceError
export const YAMLSemanticError = util.YAMLSemanticError
export const YAMLSyntaxError = util.YAMLSyntaxError
export const YAMLWarning = util.YAMLWarning
42
package-lock.json
generated
@@ -1,7 +1,42 @@
{
  "name": "action-ansible-playbook",
  "lockfileVersion": 2,
  "requires": true,
  "lockfileVersion": 1,
  "packages": {
    "": {
      "dependencies": {
        "@actions/core": "^1.2.6",
        "@actions/exec": "^1.0.4",
        "yaml": "^1.10.0"
      }
    },
    "node_modules/@actions/core": {
      "version": "1.2.6",
      "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.2.6.tgz",
      "integrity": "sha512-ZQYitnqiyBc3D+k7LsgSBmMDVkOVidaagDG7j3fOym77jNunWRuYx7VSHa9GNfFZh+zh61xsCjRj4JxMZlDqTA=="
    },
    "node_modules/@actions/exec": {
      "version": "1.0.4",
      "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.0.4.tgz",
      "integrity": "sha512-4DPChWow9yc9W3WqEbUj8Nr86xkpyE29ZzWjXucHItclLbEW6jr80Zx4nqv18QL6KK65+cifiQZXvnqgTV6oHw==",
      "dependencies": {
        "@actions/io": "^1.0.1"
      }
    },
    "node_modules/@actions/io": {
      "version": "1.0.2",
      "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.0.2.tgz",
      "integrity": "sha512-J8KuFqVPr3p6U8W93DOXlXW6zFvrQAJANdS+vw0YhusLIq+bszW8zmK2Fh1C2kDPX8FMvwIl1OUcFgvJoXLbAg=="
    },
    "node_modules/yaml": {
      "version": "1.10.0",
      "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.0.tgz",
      "integrity": "sha512-yr2icI4glYaNG+KWONODapy2/jDdMSDnrONSjblABjD9B4Z5LgiircSt8m8sRZFNi08kG9Sm0uSHtEmP3zaEGg==",
      "engines": {
        "node": ">= 6"
      }
    }
  },
  "dependencies": {
    "@actions/core": {
      "version": "1.2.6",
@@ -20,6 +55,11 @@
      "version": "1.0.2",
      "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.0.2.tgz",
      "integrity": "sha512-J8KuFqVPr3p6U8W93DOXlXW6zFvrQAJANdS+vw0YhusLIq+bszW8zmK2Fh1C2kDPX8FMvwIl1OUcFgvJoXLbAg=="
    },
    "yaml": {
      "version": "1.10.0",
      "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.0.tgz",
      "integrity": "sha512-yr2icI4glYaNG+KWONODapy2/jDdMSDnrONSjblABjD9B4Z5LgiircSt8m8sRZFNi08kG9Sm0uSHtEmP3zaEGg=="
    }
  }
}
package.json
@@ -3,6 +3,7 @@
  "main": "main.js",
  "dependencies": {
    "@actions/core": "^1.2.6",
    "@actions/exec": "^1.0.4"
    "@actions/exec": "^1.0.4",
    "yaml": "^1.10.0"
  }
}