mirror of https://github.com/dawidd6/action-ansible-playbook.git, synced 2024-11-30 03:45:19 +00:00
2374 lines
68 KiB
JavaScript
import { c as _classCallCheck, j as _inherits, k as _createSuper, b as _createClass, e as _defineProperty, p as _assertThisInitialized, a as _typeof, q as _toArray, T as Type, _ as _createForOfIteratorHelper, l as _get, m as _getPrototypeOf, o as YAMLReferenceError, r as _possibleConstructorReturn, h as _slicedToArray, g as YAMLSemanticError, n as defaultTags, f as YAMLWarning, C as Char, Y as YAMLSyntaxError, P as PlainValue } from './PlainValue-ff5147c6.js';

function addCommentBefore(str, indent, comment) {
  if (!comment) return str;
  var cc = comment.replace(/[\s\S]^/gm, "$&".concat(indent, "#"));
  return "#".concat(cc, "\n").concat(indent).concat(str);
}
function addComment(str, indent, comment) {
  return !comment ? str : comment.indexOf('\n') === -1 ? "".concat(str, " #").concat(comment) : "".concat(str, "\n") + comment.replace(/^/gm, "".concat(indent || '', "#"));
}
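/* Usage sketch (illustration only, not part of the bundled module): these two
 * helpers attach comments while stringifying nodes. Given the code above,
 *
 *   addComment('value', '  ', 'note')        evaluates to 'value #note'
 *   addComment('value', '  ', 'a\nb')        evaluates to 'value\n  #a\n  #b'
 *   addCommentBefore('value', '  ', 'a\nb')  evaluates to '#a\n  #b\n  value'
 *
 * i.e. a single-line comment is appended inline, while a multi-line comment
 * gets its own indented '#' lines, after the value for addComment() and before
 * it for addCommentBefore().
 */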
var Node = function Node() {
  _classCallCheck(this, Node);
};

function toJSON(value, arg, ctx) {
  if (Array.isArray(value)) return value.map(function (v, i) {
    return toJSON(v, String(i), ctx);
  });

  if (value && typeof value.toJSON === 'function') {
    var anchor = ctx && ctx.anchors && ctx.anchors.get(value);
    if (anchor) ctx.onCreate = function (res) {
      anchor.res = res;
      delete ctx.onCreate;
    };
    var res = value.toJSON(arg, ctx);
    if (anchor && ctx.onCreate) ctx.onCreate(res);
    return res;
  }

  if ((!ctx || !ctx.keep) && typeof value === 'bigint') return Number(value);
  return value;
}

var Scalar = /*#__PURE__*/function (_Node) {
  _inherits(Scalar, _Node);

  var _super = _createSuper(Scalar);

  function Scalar(value) {
    var _this;

    _classCallCheck(this, Scalar);

    _this = _super.call(this);
    _this.value = value;
    return _this;
  }

  _createClass(Scalar, [{
    key: "toJSON",
    value: function toJSON$1(arg, ctx) {
      return ctx && ctx.keep ? this.value : toJSON(this.value, arg, ctx);
    }
  }, {
    key: "toString",
    value: function toString() {
      return String(this.value);
    }
  }]);

  return Scalar;
}(Node);

function collectionFromPath(schema, path, value) {
  var v = value;

  for (var i = path.length - 1; i >= 0; --i) {
    var k = path[i];
    var o = Number.isInteger(k) && k >= 0 ? [] : {};
    o[k] = v;
    v = o;
  }

  return schema.createNode(v, false);
} // null, undefined, or an empty non-string iterable (e.g. [])


var isEmptyPath = function isEmptyPath(path) {
  return path == null || _typeof(path) === 'object' && path[Symbol.iterator]().next().done;
};
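/* Illustration only, not part of the bundled module: collectionFromPath()
 * wraps a value in nested collections from right to left, so
 *
 *   collectionFromPath(schema, ['a', 0, 'b'], 42)
 *
 * builds the plain structure { a: [{ b: 42 }] } and hands it to
 * schema.createNode(). Collection#addIn()/setIn() below fall back to it when a
 * path reaches past the existing tree, and isEmptyPath() is what lets them
 * treat null, undefined, or an empty iterable path as "operate on this
 * collection itself". The schema argument stands for any schema object that
 * provides createNode().
 */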
|
||
|
var Collection = /*#__PURE__*/function (_Node) {
|
||
|
_inherits(Collection, _Node);
|
||
|
|
||
|
var _super = _createSuper(Collection);
|
||
|
|
||
|
function Collection(schema) {
|
||
|
var _this;
|
||
|
|
||
|
_classCallCheck(this, Collection);
|
||
|
|
||
|
_this = _super.call(this);
|
||
|
|
||
|
_defineProperty(_assertThisInitialized(_this), "items", []);
|
||
|
|
||
|
_this.schema = schema;
|
||
|
return _this;
|
||
|
}
|
||
|
|
||
|
_createClass(Collection, [{
|
||
|
key: "addIn",
|
||
|
value: function addIn(path, value) {
|
||
|
if (isEmptyPath(path)) this.add(value);else {
|
||
|
var _path = _toArray(path),
|
||
|
key = _path[0],
|
||
|
rest = _path.slice(1);
|
||
|
|
||
|
var node = this.get(key, true);
|
||
|
if (node instanceof Collection) node.addIn(rest, value);else if (node === undefined && this.schema) this.set(key, collectionFromPath(this.schema, rest, value));else throw new Error("Expected YAML collection at ".concat(key, ". Remaining path: ").concat(rest));
|
||
|
}
|
||
|
}
|
||
|
}, {
|
||
|
key: "deleteIn",
|
||
|
value: function deleteIn(_ref) {
|
||
|
var _ref2 = _toArray(_ref),
|
||
|
key = _ref2[0],
|
||
|
rest = _ref2.slice(1);
|
||
|
|
||
|
if (rest.length === 0) return this.delete(key);
|
||
|
var node = this.get(key, true);
|
||
|
if (node instanceof Collection) return node.deleteIn(rest);else throw new Error("Expected YAML collection at ".concat(key, ". Remaining path: ").concat(rest));
|
||
|
}
|
||
|
}, {
|
||
|
key: "getIn",
|
||
|
value: function getIn(_ref3, keepScalar) {
|
||
|
var _ref4 = _toArray(_ref3),
|
||
|
key = _ref4[0],
|
||
|
rest = _ref4.slice(1);
|
||
|
|
||
|
var node = this.get(key, true);
|
||
|
if (rest.length === 0) return !keepScalar && node instanceof Scalar ? node.value : node;else return node instanceof Collection ? node.getIn(rest, keepScalar) : undefined;
|
||
|
}
|
||
|
}, {
|
||
|
key: "hasAllNullValues",
|
||
|
value: function hasAllNullValues() {
|
||
|
return this.items.every(function (node) {
|
||
|
if (!node || node.type !== 'PAIR') return false;
|
||
|
var n = node.value;
|
||
|
return n == null || n instanceof Scalar && n.value == null && !n.commentBefore && !n.comment && !n.tag;
|
||
|
});
|
||
|
}
|
||
|
}, {
|
||
|
key: "hasIn",
|
||
|
value: function hasIn(_ref5) {
|
||
|
var _ref6 = _toArray(_ref5),
|
||
|
key = _ref6[0],
|
||
|
rest = _ref6.slice(1);
|
||
|
|
||
|
if (rest.length === 0) return this.has(key);
|
||
|
var node = this.get(key, true);
|
||
|
return node instanceof Collection ? node.hasIn(rest) : false;
|
||
|
}
|
||
|
}, {
|
||
|
key: "setIn",
|
||
|
value: function setIn(_ref7, value) {
|
||
|
var _ref8 = _toArray(_ref7),
|
||
|
key = _ref8[0],
|
||
|
rest = _ref8.slice(1);
|
||
|
|
||
|
if (rest.length === 0) {
|
||
|
this.set(key, value);
|
||
|
} else {
|
||
|
var node = this.get(key, true);
|
||
|
if (node instanceof Collection) node.setIn(rest, value);else if (node === undefined && this.schema) this.set(key, collectionFromPath(this.schema, rest, value));else throw new Error("Expected YAML collection at ".concat(key, ". Remaining path: ").concat(rest));
|
||
|
}
|
||
|
} // overridden in implementations
|
||
|
|
||
|
/* istanbul ignore next */
|
||
|
|
||
|
}, {
|
||
|
key: "toJSON",
|
||
|
value: function toJSON() {
|
||
|
return null;
|
||
|
}
|
||
|
}, {
|
||
|
key: "toString",
|
||
|
value: function toString(ctx, _ref9, onComment, onChompKeep) {
|
||
|
var _this2 = this;
|
||
|
|
||
|
var blockItem = _ref9.blockItem,
|
||
|
flowChars = _ref9.flowChars,
|
||
|
isMap = _ref9.isMap,
|
||
|
itemIndent = _ref9.itemIndent;
|
||
|
var _ctx = ctx,
|
||
|
indent = _ctx.indent,
|
||
|
indentStep = _ctx.indentStep,
|
||
|
stringify = _ctx.stringify;
|
||
|
var inFlow = this.type === Type.FLOW_MAP || this.type === Type.FLOW_SEQ || ctx.inFlow;
|
||
|
if (inFlow) itemIndent += indentStep;
|
||
|
var allNullValues = isMap && this.hasAllNullValues();
|
||
|
ctx = Object.assign({}, ctx, {
|
||
|
allNullValues: allNullValues,
|
||
|
indent: itemIndent,
|
||
|
inFlow: inFlow,
|
||
|
type: null
|
||
|
});
|
||
|
var chompKeep = false;
|
||
|
var hasItemWithNewLine = false;
|
||
|
var nodes = this.items.reduce(function (nodes, item, i) {
|
||
|
var comment;
|
||
|
|
||
|
if (item) {
|
||
|
if (!chompKeep && item.spaceBefore) nodes.push({
|
||
|
type: 'comment',
|
||
|
str: ''
|
||
|
});
|
||
|
if (item.commentBefore) item.commentBefore.match(/^.*$/gm).forEach(function (line) {
|
||
|
nodes.push({
|
||
|
type: 'comment',
|
||
|
str: "#".concat(line)
|
||
|
});
|
||
|
});
|
||
|
if (item.comment) comment = item.comment;
|
||
|
if (inFlow && (!chompKeep && item.spaceBefore || item.commentBefore || item.comment || item.key && (item.key.commentBefore || item.key.comment) || item.value && (item.value.commentBefore || item.value.comment))) hasItemWithNewLine = true;
|
||
|
}
|
||
|
|
||
|
chompKeep = false;
|
||
|
var str = stringify(item, ctx, function () {
|
||
|
return comment = null;
|
||
|
}, function () {
|
||
|
return chompKeep = true;
|
||
|
});
|
||
|
if (inFlow && !hasItemWithNewLine && str.includes('\n')) hasItemWithNewLine = true;
|
||
|
if (inFlow && i < _this2.items.length - 1) str += ',';
|
||
|
str = addComment(str, itemIndent, comment);
|
||
|
if (chompKeep && (comment || inFlow)) chompKeep = false;
|
||
|
nodes.push({
|
||
|
type: 'item',
|
||
|
str: str
|
||
|
});
|
||
|
return nodes;
|
||
|
}, []);
|
||
|
var str;
|
||
|
|
||
|
if (nodes.length === 0) {
|
||
|
str = flowChars.start + flowChars.end;
|
||
|
} else if (inFlow) {
|
||
|
var start = flowChars.start,
|
||
|
end = flowChars.end;
|
||
|
var strings = nodes.map(function (n) {
|
||
|
return n.str;
|
||
|
});
|
||
|
|
||
|
if (hasItemWithNewLine || strings.reduce(function (sum, str) {
|
||
|
return sum + str.length + 2;
|
||
|
}, 2) > Collection.maxFlowStringSingleLineLength) {
|
||
|
str = start;
|
||
|
|
||
|
var _iterator = _createForOfIteratorHelper(strings),
|
||
|
_step;
|
||
|
|
||
|
try {
|
||
|
for (_iterator.s(); !(_step = _iterator.n()).done;) {
|
||
|
var s = _step.value;
|
||
|
str += s ? "\n".concat(indentStep).concat(indent).concat(s) : '\n';
|
||
|
}
|
||
|
} catch (err) {
|
||
|
_iterator.e(err);
|
||
|
} finally {
|
||
|
_iterator.f();
|
||
|
}
|
||
|
|
||
|
str += "\n".concat(indent).concat(end);
|
||
|
} else {
|
||
|
str = "".concat(start, " ").concat(strings.join(' '), " ").concat(end);
|
||
|
}
|
||
|
} else {
|
||
|
var _strings = nodes.map(blockItem);
|
||
|
|
||
|
str = _strings.shift();
|
||
|
|
||
|
var _iterator2 = _createForOfIteratorHelper(_strings),
|
||
|
_step2;
|
||
|
|
||
|
try {
|
||
|
for (_iterator2.s(); !(_step2 = _iterator2.n()).done;) {
|
||
|
var _s = _step2.value;
|
||
|
str += _s ? "\n".concat(indent).concat(_s) : '\n';
|
||
|
}
|
||
|
} catch (err) {
|
||
|
_iterator2.e(err);
|
||
|
} finally {
|
||
|
_iterator2.f();
|
||
|
}
|
||
|
}
|
||
|
|
||
|
if (this.comment) {
|
||
|
str += '\n' + this.comment.replace(/^/gm, "".concat(indent, "#"));
|
||
|
if (onComment) onComment();
|
||
|
} else if (chompKeep && onChompKeep) onChompKeep();
|
||
|
|
||
|
return str;
|
||
|
}
|
||
|
}]);
|
||
|
|
||
|
return Collection;
|
||
|
}(Node);
|
||
|
|
||
|
_defineProperty(Collection, "maxFlowStringSingleLineLength", 60);
|
||
|
|
||
|
function asItemIndex(key) {
|
||
|
var idx = key instanceof Scalar ? key.value : key;
|
||
|
if (idx && typeof idx === 'string') idx = Number(idx);
|
||
|
return Number.isInteger(idx) && idx >= 0 ? idx : null;
|
||
|
}
|
||
|
|
||
|
var YAMLSeq = /*#__PURE__*/function (_Collection) {
|
||
|
_inherits(YAMLSeq, _Collection);
|
||
|
|
||
|
var _super = _createSuper(YAMLSeq);
|
||
|
|
||
|
function YAMLSeq() {
|
||
|
_classCallCheck(this, YAMLSeq);
|
||
|
|
||
|
return _super.apply(this, arguments);
|
||
|
}
|
||
|
|
||
|
_createClass(YAMLSeq, [{
|
||
|
key: "add",
|
||
|
value: function add(value) {
|
||
|
this.items.push(value);
|
||
|
}
|
||
|
}, {
|
||
|
key: "delete",
|
||
|
value: function _delete(key) {
|
||
|
var idx = asItemIndex(key);
|
||
|
if (typeof idx !== 'number') return false;
|
||
|
var del = this.items.splice(idx, 1);
|
||
|
return del.length > 0;
|
||
|
}
|
||
|
}, {
|
||
|
key: "get",
|
||
|
value: function get(key, keepScalar) {
|
||
|
var idx = asItemIndex(key);
|
||
|
if (typeof idx !== 'number') return undefined;
|
||
|
var it = this.items[idx];
|
||
|
return !keepScalar && it instanceof Scalar ? it.value : it;
|
||
|
}
|
||
|
}, {
|
||
|
key: "has",
|
||
|
value: function has(key) {
|
||
|
var idx = asItemIndex(key);
|
||
|
return typeof idx === 'number' && idx < this.items.length;
|
||
|
}
|
||
|
}, {
|
||
|
key: "set",
|
||
|
value: function set(key, value) {
|
||
|
var idx = asItemIndex(key);
|
||
|
if (typeof idx !== 'number') throw new Error("Expected a valid index, not ".concat(key, "."));
|
||
|
this.items[idx] = value;
|
||
|
}
|
||
|
}, {
|
||
|
key: "toJSON",
|
||
|
value: function toJSON$1(_, ctx) {
|
||
|
var seq = [];
|
||
|
if (ctx && ctx.onCreate) ctx.onCreate(seq);
|
||
|
var i = 0;
|
||
|
|
||
|
var _iterator = _createForOfIteratorHelper(this.items),
|
||
|
_step;
|
||
|
|
||
|
try {
|
||
|
for (_iterator.s(); !(_step = _iterator.n()).done;) {
|
||
|
var item = _step.value;
|
||
|
seq.push(toJSON(item, String(i++), ctx));
|
||
|
}
|
||
|
} catch (err) {
|
||
|
_iterator.e(err);
|
||
|
} finally {
|
||
|
_iterator.f();
|
||
|
}
|
||
|
|
||
|
return seq;
|
||
|
}
|
||
|
}, {
|
||
|
key: "toString",
|
||
|
value: function toString(ctx, onComment, onChompKeep) {
|
||
|
if (!ctx) return JSON.stringify(this);
|
||
|
return _get(_getPrototypeOf(YAMLSeq.prototype), "toString", this).call(this, ctx, {
|
||
|
blockItem: function blockItem(n) {
|
||
|
return n.type === 'comment' ? n.str : "- ".concat(n.str);
|
||
|
},
|
||
|
flowChars: {
|
||
|
start: '[',
|
||
|
end: ']'
|
||
|
},
|
||
|
isMap: false,
|
||
|
itemIndent: (ctx.indent || '') + ' '
|
||
|
}, onComment, onChompKeep);
|
||
|
}
|
||
|
}]);
|
||
|
|
||
|
return YAMLSeq;
|
||
|
}(Collection);
|
||
|
|
||
|
var stringifyKey = function stringifyKey(key, jsKey, ctx) {
|
||
|
if (jsKey === null) return '';
|
||
|
if (_typeof(jsKey) !== 'object') return String(jsKey);
|
||
|
if (key instanceof Node && ctx && ctx.doc) return key.toString({
|
||
|
anchors: {},
|
||
|
doc: ctx.doc,
|
||
|
indent: '',
|
||
|
indentStep: ctx.indentStep,
|
||
|
inFlow: true,
|
||
|
inStringifyKey: true,
|
||
|
stringify: ctx.stringify
|
||
|
});
|
||
|
return JSON.stringify(jsKey);
|
||
|
};
|
||
|
|
||
|
var Pair = /*#__PURE__*/function (_Node) {
|
||
|
_inherits(Pair, _Node);
|
||
|
|
||
|
var _super = _createSuper(Pair);
|
||
|
|
||
|
function Pair(key) {
|
||
|
var _this;
|
||
|
|
||
|
var value = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : null;
|
||
|
|
||
|
_classCallCheck(this, Pair);
|
||
|
|
||
|
_this = _super.call(this);
|
||
|
_this.key = key;
|
||
|
_this.value = value;
|
||
|
_this.type = Pair.Type.PAIR;
|
||
|
return _this;
|
||
|
}
|
||
|
|
||
|
_createClass(Pair, [{
|
||
|
key: "addToJSMap",
|
||
|
value: function addToJSMap(ctx, map) {
|
||
|
var key = toJSON(this.key, '', ctx);
|
||
|
|
||
|
if (map instanceof Map) {
|
||
|
var value = toJSON(this.value, key, ctx);
|
||
|
map.set(key, value);
|
||
|
} else if (map instanceof Set) {
|
||
|
map.add(key);
|
||
|
} else {
|
||
|
var stringKey = stringifyKey(this.key, key, ctx);
|
||
|
map[stringKey] = toJSON(this.value, stringKey, ctx);
|
||
|
}
|
||
|
|
||
|
return map;
|
||
|
}
|
||
|
}, {
|
||
|
key: "toJSON",
|
||
|
value: function toJSON(_, ctx) {
|
||
|
var pair = ctx && ctx.mapAsMap ? new Map() : {};
|
||
|
return this.addToJSMap(ctx, pair);
|
||
|
}
|
||
|
}, {
|
||
|
key: "toString",
|
||
|
value: function toString(ctx, onComment, onChompKeep) {
|
||
|
if (!ctx || !ctx.doc) return JSON.stringify(this);
|
||
|
var _ctx$doc$options = ctx.doc.options,
|
||
|
indentSize = _ctx$doc$options.indent,
|
||
|
indentSeq = _ctx$doc$options.indentSeq,
|
||
|
simpleKeys = _ctx$doc$options.simpleKeys;
|
||
|
var key = this.key,
|
||
|
value = this.value;
|
||
|
var keyComment = key instanceof Node && key.comment;
|
||
|
|
||
|
if (simpleKeys) {
|
||
|
if (keyComment) {
|
||
|
throw new Error('With simple keys, key nodes cannot have comments');
|
||
|
}
|
||
|
|
||
|
if (key instanceof Collection) {
|
||
|
var msg = 'With simple keys, collection cannot be used as a key value';
|
||
|
throw new Error(msg);
|
||
|
}
|
||
|
}
|
||
|
|
||
|
var explicitKey = !simpleKeys && (!key || keyComment || key instanceof Collection || key.type === Type.BLOCK_FOLDED || key.type === Type.BLOCK_LITERAL);
|
||
|
var _ctx = ctx,
|
||
|
doc = _ctx.doc,
|
||
|
indent = _ctx.indent,
|
||
|
indentStep = _ctx.indentStep,
|
||
|
stringify = _ctx.stringify;
|
||
|
ctx = Object.assign({}, ctx, {
|
||
|
implicitKey: !explicitKey,
|
||
|
indent: indent + indentStep
|
||
|
});
|
||
|
var chompKeep = false;
|
||
|
var str = stringify(key, ctx, function () {
|
||
|
return keyComment = null;
|
||
|
}, function () {
|
||
|
return chompKeep = true;
|
||
|
});
|
||
|
str = addComment(str, ctx.indent, keyComment);
|
||
|
|
||
|
if (ctx.allNullValues && !simpleKeys) {
|
||
|
if (this.comment) {
|
||
|
str = addComment(str, ctx.indent, this.comment);
|
||
|
if (onComment) onComment();
|
||
|
} else if (chompKeep && !keyComment && onChompKeep) onChompKeep();
|
||
|
|
||
|
return ctx.inFlow ? str : "? ".concat(str);
|
||
|
}
|
||
|
|
||
|
str = explicitKey ? "? ".concat(str, "\n").concat(indent, ":") : "".concat(str, ":");
|
||
|
|
||
|
if (this.comment) {
|
||
|
// expected (but not strictly required) to be a single-line comment
|
||
|
str = addComment(str, ctx.indent, this.comment);
|
||
|
if (onComment) onComment();
|
||
|
}
|
||
|
|
||
|
var vcb = '';
|
||
|
var valueComment = null;
|
||
|
|
||
|
if (value instanceof Node) {
|
||
|
if (value.spaceBefore) vcb = '\n';
|
||
|
|
||
|
if (value.commentBefore) {
|
||
|
var cs = value.commentBefore.replace(/^/gm, "".concat(ctx.indent, "#"));
|
||
|
vcb += "\n".concat(cs);
|
||
|
}
|
||
|
|
||
|
valueComment = value.comment;
|
||
|
} else if (value && _typeof(value) === 'object') {
|
||
|
value = doc.schema.createNode(value, true);
|
||
|
}
|
||
|
|
||
|
ctx.implicitKey = false;
|
||
|
if (!explicitKey && !this.comment && value instanceof Scalar) ctx.indentAtStart = str.length + 1;
|
||
|
chompKeep = false;
|
||
|
|
||
|
if (!indentSeq && indentSize >= 2 && !ctx.inFlow && !explicitKey && value instanceof YAMLSeq && value.type !== Type.FLOW_SEQ && !value.tag && !doc.anchors.getName(value)) {
|
||
|
// If indentSeq === false, consider '- ' as part of indentation where possible
|
||
|
ctx.indent = ctx.indent.substr(2);
|
||
|
}
|
||
|
|
||
|
var valueStr = stringify(value, ctx, function () {
|
||
|
return valueComment = null;
|
||
|
}, function () {
|
||
|
return chompKeep = true;
|
||
|
});
|
||
|
var ws = ' ';
|
||
|
|
||
|
if (vcb || this.comment) {
|
||
|
ws = "".concat(vcb, "\n").concat(ctx.indent);
|
||
|
} else if (!explicitKey && value instanceof Collection) {
|
||
|
var flow = valueStr[0] === '[' || valueStr[0] === '{';
|
||
|
if (!flow || valueStr.includes('\n')) ws = "\n".concat(ctx.indent);
|
||
|
}
|
||
|
|
||
|
if (chompKeep && !valueComment && onChompKeep) onChompKeep();
|
||
|
return addComment(str + ws + valueStr, ctx.indent, valueComment);
|
||
|
}
|
||
|
}, {
|
||
|
key: "commentBefore",
|
||
|
get: function get() {
|
||
|
return this.key instanceof Node ? this.key.commentBefore : undefined;
|
||
|
},
|
||
|
set: function set(cb) {
|
||
|
if (this.key == null) this.key = new Scalar(null);
|
||
|
if (this.key instanceof Node) this.key.commentBefore = cb;else {
|
||
|
var msg = 'Pair.commentBefore is an alias for Pair.key.commentBefore. To set it, the key must be a Node.';
|
||
|
throw new Error(msg);
|
||
|
}
|
||
|
}
|
||
|
}]);
|
||
|
|
||
|
return Pair;
|
||
|
}(Node);
|
||
|
|
||
|
_defineProperty(Pair, "Type", {
|
||
|
PAIR: 'PAIR',
|
||
|
MERGE_PAIR: 'MERGE_PAIR'
|
||
|
});
|
||
|
|
||
|
var getAliasCount = function getAliasCount(node, anchors) {
|
||
|
if (node instanceof Alias) {
|
||
|
var anchor = anchors.get(node.source);
|
||
|
return anchor.count * anchor.aliasCount;
|
||
|
} else if (node instanceof Collection) {
|
||
|
var count = 0;
|
||
|
|
||
|
var _iterator = _createForOfIteratorHelper(node.items),
|
||
|
_step;
|
||
|
|
||
|
try {
|
||
|
for (_iterator.s(); !(_step = _iterator.n()).done;) {
|
||
|
var item = _step.value;
|
||
|
var c = getAliasCount(item, anchors);
|
||
|
if (c > count) count = c;
|
||
|
}
|
||
|
} catch (err) {
|
||
|
_iterator.e(err);
|
||
|
} finally {
|
||
|
_iterator.f();
|
||
|
}
|
||
|
|
||
|
return count;
|
||
|
} else if (node instanceof Pair) {
|
||
|
var kc = getAliasCount(node.key, anchors);
|
||
|
var vc = getAliasCount(node.value, anchors);
|
||
|
return Math.max(kc, vc);
|
||
|
}
|
||
|
|
||
|
return 1;
|
||
|
};
|
||
|
|
||
|
var Alias = /*#__PURE__*/function (_Node) {
|
||
|
_inherits(Alias, _Node);
|
||
|
|
||
|
var _super = _createSuper(Alias);
|
||
|
|
||
|
_createClass(Alias, null, [{
|
||
|
key: "stringify",
|
||
|
value: function stringify(_ref, _ref2) {
|
||
|
var range = _ref.range,
|
||
|
source = _ref.source;
|
||
|
var anchors = _ref2.anchors,
|
||
|
doc = _ref2.doc,
|
||
|
implicitKey = _ref2.implicitKey,
|
||
|
inStringifyKey = _ref2.inStringifyKey;
|
||
|
var anchor = Object.keys(anchors).find(function (a) {
|
||
|
return anchors[a] === source;
|
||
|
});
|
||
|
if (!anchor && inStringifyKey) anchor = doc.anchors.getName(source) || doc.anchors.newName();
|
||
|
if (anchor) return "*".concat(anchor).concat(implicitKey ? ' ' : '');
|
||
|
var msg = doc.anchors.getName(source) ? 'Alias node must be after source node' : 'Source node not found for alias node';
|
||
|
throw new Error("".concat(msg, " [").concat(range, "]"));
|
||
|
}
|
||
|
}]);
|
||
|
|
||
|
function Alias(source) {
|
||
|
var _this;
|
||
|
|
||
|
_classCallCheck(this, Alias);
|
||
|
|
||
|
_this = _super.call(this);
|
||
|
_this.source = source;
|
||
|
_this.type = Type.ALIAS;
|
||
|
return _this;
|
||
|
}
|
||
|
|
||
|
_createClass(Alias, [{
|
||
|
key: "toJSON",
|
||
|
value: function toJSON$1(arg, ctx) {
|
||
|
if (!ctx) return toJSON(this.source, arg, ctx);
|
||
|
var anchors = ctx.anchors,
|
||
|
maxAliasCount = ctx.maxAliasCount;
|
||
|
var anchor = anchors.get(this.source);
|
||
|
/* istanbul ignore if */
|
||
|
|
||
|
if (!anchor || anchor.res === undefined) {
|
||
|
var msg = 'This should not happen: Alias anchor was not resolved?';
|
||
|
if (this.cstNode) throw new YAMLReferenceError(this.cstNode, msg);else throw new ReferenceError(msg);
|
||
|
}
|
||
|
|
||
|
if (maxAliasCount >= 0) {
|
||
|
anchor.count += 1;
|
||
|
if (anchor.aliasCount === 0) anchor.aliasCount = getAliasCount(this.source, anchors);
|
||
|
|
||
|
if (anchor.count * anchor.aliasCount > maxAliasCount) {
|
||
|
var _msg = 'Excessive alias count indicates a resource exhaustion attack';
|
||
|
if (this.cstNode) throw new YAMLReferenceError(this.cstNode, _msg);else throw new ReferenceError(_msg);
|
||
|
}
|
||
|
}
|
||
|
|
||
|
return anchor.res;
|
||
|
} // Only called when stringifying an alias mapping key while constructing
|
||
|
// Object output.
|
||
|
|
||
|
}, {
|
||
|
key: "toString",
|
||
|
value: function toString(ctx) {
|
||
|
return Alias.stringify(this, ctx);
|
||
|
}
|
||
|
}, {
|
||
|
key: "tag",
|
||
|
set: function set(t) {
|
||
|
throw new Error('Alias nodes cannot have tags');
|
||
|
}
|
||
|
}]);
|
||
|
|
||
|
return Alias;
|
||
|
}(Node);
|
||
|
|
||
|
_defineProperty(Alias, "default", true);
|
||
|
|
||
|
function findPair(items, key) {
|
||
|
var k = key instanceof Scalar ? key.value : key;
|
||
|
|
||
|
var _iterator = _createForOfIteratorHelper(items),
|
||
|
_step;
|
||
|
|
||
|
try {
|
||
|
for (_iterator.s(); !(_step = _iterator.n()).done;) {
|
||
|
var it = _step.value;
|
||
|
|
||
|
if (it instanceof Pair) {
|
||
|
if (it.key === key || it.key === k) return it;
|
||
|
if (it.key && it.key.value === k) return it;
|
||
|
}
|
||
|
}
|
||
|
} catch (err) {
|
||
|
_iterator.e(err);
|
||
|
} finally {
|
||
|
_iterator.f();
|
||
|
}
|
||
|
|
||
|
return undefined;
|
||
|
}
|
||
|
var YAMLMap = /*#__PURE__*/function (_Collection) {
|
||
|
_inherits(YAMLMap, _Collection);
|
||
|
|
||
|
var _super = _createSuper(YAMLMap);
|
||
|
|
||
|
function YAMLMap() {
|
||
|
_classCallCheck(this, YAMLMap);
|
||
|
|
||
|
return _super.apply(this, arguments);
|
||
|
}
|
||
|
|
||
|
_createClass(YAMLMap, [{
|
||
|
key: "add",
|
||
|
value: function add(pair, overwrite) {
|
||
|
if (!pair) pair = new Pair(pair);else if (!(pair instanceof Pair)) pair = new Pair(pair.key || pair, pair.value);
|
||
|
var prev = findPair(this.items, pair.key);
|
||
|
var sortEntries = this.schema && this.schema.sortMapEntries;
|
||
|
|
||
|
if (prev) {
|
||
|
if (overwrite) prev.value = pair.value;else throw new Error("Key ".concat(pair.key, " already set"));
|
||
|
} else if (sortEntries) {
|
||
|
var i = this.items.findIndex(function (item) {
|
||
|
return sortEntries(pair, item) < 0;
|
||
|
});
|
||
|
if (i === -1) this.items.push(pair);else this.items.splice(i, 0, pair);
|
||
|
} else {
|
||
|
this.items.push(pair);
|
||
|
}
|
||
|
}
|
||
|
}, {
|
||
|
key: "delete",
|
||
|
value: function _delete(key) {
|
||
|
var it = findPair(this.items, key);
|
||
|
if (!it) return false;
|
||
|
var del = this.items.splice(this.items.indexOf(it), 1);
|
||
|
return del.length > 0;
|
||
|
}
|
||
|
}, {
|
||
|
key: "get",
|
||
|
value: function get(key, keepScalar) {
|
||
|
var it = findPair(this.items, key);
|
||
|
var node = it && it.value;
|
||
|
return !keepScalar && node instanceof Scalar ? node.value : node;
|
||
|
}
|
||
|
}, {
|
||
|
key: "has",
|
||
|
value: function has(key) {
|
||
|
return !!findPair(this.items, key);
|
||
|
}
|
||
|
}, {
|
||
|
key: "set",
|
||
|
value: function set(key, value) {
|
||
|
this.add(new Pair(key, value), true);
|
||
|
}
|
||
|
/**
|
||
|
* @param {*} arg ignored
|
||
|
* @param {*} ctx Conversion context, originally set in Document#toJSON()
|
||
|
* @param {Class} Type If set, forces the returned collection type
|
||
|
* @returns {*} Instance of Type, Map, or Object
|
||
|
*/
|
||
|
|
||
|
}, {
|
||
|
key: "toJSON",
|
||
|
value: function toJSON(_, ctx, Type) {
|
||
|
var map = Type ? new Type() : ctx && ctx.mapAsMap ? new Map() : {};
|
||
|
if (ctx && ctx.onCreate) ctx.onCreate(map);
|
||
|
|
||
|
var _iterator2 = _createForOfIteratorHelper(this.items),
|
||
|
_step2;
|
||
|
|
||
|
try {
|
||
|
for (_iterator2.s(); !(_step2 = _iterator2.n()).done;) {
|
||
|
var item = _step2.value;
|
||
|
item.addToJSMap(ctx, map);
|
||
|
}
|
||
|
} catch (err) {
|
||
|
_iterator2.e(err);
|
||
|
} finally {
|
||
|
_iterator2.f();
|
||
|
}
|
||
|
|
||
|
return map;
|
||
|
}
|
||
|
}, {
|
||
|
key: "toString",
|
||
|
value: function toString(ctx, onComment, onChompKeep) {
|
||
|
if (!ctx) return JSON.stringify(this);
|
||
|
|
||
|
var _iterator3 = _createForOfIteratorHelper(this.items),
|
||
|
_step3;
|
||
|
|
||
|
try {
|
||
|
for (_iterator3.s(); !(_step3 = _iterator3.n()).done;) {
|
||
|
var item = _step3.value;
|
||
|
if (!(item instanceof Pair)) throw new Error("Map items must all be pairs; found ".concat(JSON.stringify(item), " instead"));
|
||
|
}
|
||
|
} catch (err) {
|
||
|
_iterator3.e(err);
|
||
|
} finally {
|
||
|
_iterator3.f();
|
||
|
}
|
||
|
|
||
|
return _get(_getPrototypeOf(YAMLMap.prototype), "toString", this).call(this, ctx, {
|
||
|
blockItem: function blockItem(n) {
|
||
|
return n.str;
|
||
|
},
|
||
|
flowChars: {
|
||
|
start: '{',
|
||
|
end: '}'
|
||
|
},
|
||
|
isMap: true,
|
||
|
itemIndent: ctx.indent || ''
|
||
|
}, onComment, onChompKeep);
|
||
|
}
|
||
|
}]);
|
||
|
|
||
|
return YAMLMap;
|
||
|
}(Collection);
|
||
|
|
||
|
var MERGE_KEY = '<<';
|
||
|
var Merge = /*#__PURE__*/function (_Pair) {
|
||
|
_inherits(Merge, _Pair);
|
||
|
|
||
|
var _super = _createSuper(Merge);
|
||
|
|
||
|
function Merge(pair) {
|
||
|
var _this;
|
||
|
|
||
|
_classCallCheck(this, Merge);
|
||
|
|
||
|
if (pair instanceof Pair) {
|
||
|
var seq = pair.value;
|
||
|
|
||
|
if (!(seq instanceof YAMLSeq)) {
|
||
|
seq = new YAMLSeq();
|
||
|
seq.items.push(pair.value);
|
||
|
seq.range = pair.value.range;
|
||
|
}
|
||
|
|
||
|
_this = _super.call(this, pair.key, seq);
|
||
|
_this.range = pair.range;
|
||
|
} else {
|
||
|
_this = _super.call(this, new Scalar(MERGE_KEY), new YAMLSeq());
|
||
|
}
|
||
|
|
||
|
_this.type = Pair.Type.MERGE_PAIR;
|
||
|
return _possibleConstructorReturn(_this);
|
||
|
} // If the value associated with a merge key is a single mapping node, each of
|
||
|
// its key/value pairs is inserted into the current mapping, unless the key
|
||
|
// already exists in it. If the value associated with the merge key is a
|
||
|
// sequence, then this sequence is expected to contain mapping nodes and each
|
||
|
// of these nodes is merged in turn according to its order in the sequence.
|
||
|
// Keys in mapping nodes earlier in the sequence override keys specified in
|
||
|
// later mapping nodes. -- http://yaml.org/type/merge.html
|
||
|
|
||
|
|
||
|
_createClass(Merge, [{
|
||
|
key: "addToJSMap",
|
||
|
value: function addToJSMap(ctx, map) {
|
||
|
var _iterator = _createForOfIteratorHelper(this.value.items),
|
||
|
_step;
|
||
|
|
||
|
try {
|
||
|
for (_iterator.s(); !(_step = _iterator.n()).done;) {
|
||
|
var source = _step.value.source;
|
||
|
if (!(source instanceof YAMLMap)) throw new Error('Merge sources must be maps');
|
||
|
var srcMap = source.toJSON(null, ctx, Map);
|
||
|
|
||
|
var _iterator2 = _createForOfIteratorHelper(srcMap),
|
||
|
_step2;
|
||
|
|
||
|
try {
|
||
|
for (_iterator2.s(); !(_step2 = _iterator2.n()).done;) {
|
||
|
var _step2$value = _slicedToArray(_step2.value, 2),
|
||
|
key = _step2$value[0],
|
||
|
value = _step2$value[1];
|
||
|
|
||
|
if (map instanceof Map) {
|
||
|
if (!map.has(key)) map.set(key, value);
|
||
|
} else if (map instanceof Set) {
|
||
|
map.add(key);
|
||
|
} else {
|
||
|
if (!Object.prototype.hasOwnProperty.call(map, key)) map[key] = value;
|
||
|
}
|
||
|
}
|
||
|
} catch (err) {
|
||
|
_iterator2.e(err);
|
||
|
} finally {
|
||
|
_iterator2.f();
|
||
|
}
|
||
|
}
|
||
|
} catch (err) {
|
||
|
_iterator.e(err);
|
||
|
} finally {
|
||
|
_iterator.f();
|
||
|
}
|
||
|
|
||
|
return map;
|
||
|
}
|
||
|
}, {
|
||
|
key: "toString",
|
||
|
value: function toString(ctx, onComment) {
|
||
|
var seq = this.value;
|
||
|
if (seq.items.length > 1) return _get(_getPrototypeOf(Merge.prototype), "toString", this).call(this, ctx, onComment);
|
||
|
this.value = seq.items[0];
|
||
|
|
||
|
var str = _get(_getPrototypeOf(Merge.prototype), "toString", this).call(this, ctx, onComment);
|
||
|
|
||
|
this.value = seq;
|
||
|
return str;
|
||
|
}
|
||
|
}]);
|
||
|
|
||
|
return Merge;
|
||
|
}(Pair);
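/* Illustration only, not part of the bundled module: the Merge pair class above
 * implements YAML 1.1 '<<' merge keys. For example, the document
 *
 *   base: &base { x: 1, y: 2 }
 *   override:
 *     <<: *base
 *     y: 3
 *
 * resolves override to { x: 1, y: 3 }: Merge#addToJSMap() only copies a merged
 * entry into the target when the key is not already present, so keys set
 * directly on the mapping, and sources earlier in a merge sequence, win.
 */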
var binaryOptions = {
  defaultType: Type.BLOCK_LITERAL,
  lineWidth: 76
};
var boolOptions = {
  trueStr: 'true',
  falseStr: 'false'
};
var intOptions = {
  asBigInt: false
};
var nullOptions = {
  nullStr: 'null'
};
var strOptions = {
  defaultType: Type.PLAIN,
  doubleQuoted: {
    jsonEncoding: false,
    minMultiLineLength: 40
  },
  fold: {
    lineWidth: 80,
    minContentWidth: 20
  }
};
|
||
|
|
||
|
function resolveScalar(str, tags, scalarFallback) {
|
||
|
var _iterator = _createForOfIteratorHelper(tags),
|
||
|
_step;
|
||
|
|
||
|
try {
|
||
|
for (_iterator.s(); !(_step = _iterator.n()).done;) {
|
||
|
var _step$value = _step.value,
|
||
|
format = _step$value.format,
|
||
|
test = _step$value.test,
|
||
|
resolve = _step$value.resolve;
|
||
|
|
||
|
if (test) {
|
||
|
var match = str.match(test);
|
||
|
|
||
|
if (match) {
|
||
|
var res = resolve.apply(null, match);
|
||
|
if (!(res instanceof Scalar)) res = new Scalar(res);
|
||
|
if (format) res.format = format;
|
||
|
return res;
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
} catch (err) {
|
||
|
_iterator.e(err);
|
||
|
} finally {
|
||
|
_iterator.f();
|
||
|
}
|
||
|
|
||
|
if (scalarFallback) str = scalarFallback(str);
|
||
|
return new Scalar(str);
|
||
|
}
var FOLD_FLOW = 'flow';
var FOLD_BLOCK = 'block';
var FOLD_QUOTED = 'quoted'; // presumes i+1 is at the start of a line
// returns index of last newline in more-indented block

var consumeMoreIndentedLines = function consumeMoreIndentedLines(text, i) {
  var ch = text[i + 1];

  while (ch === ' ' || ch === '\t') {
    do {
      ch = text[i += 1];
    } while (ch && ch !== '\n');

    ch = text[i + 1];
  }

  return i;
};
/**
 * Tries to keep input at up to `lineWidth` characters, splitting only on spaces
 * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are
 * terminated with `\n` and started with `indent`.
 *
 * @param {string} text
 * @param {string} indent
 * @param {string} [mode='flow'] `'block'` prevents more-indented lines
 *   from being folded; `'quoted'` allows for `\` escapes, including escaped
 *   newlines
 * @param {Object} options
 * @param {number} [options.indentAtStart] Accounts for leading contents on
 *   the first line, defaulting to `indent.length`
 * @param {number} [options.lineWidth=80]
 * @param {number} [options.minContentWidth=20] Allow highly indented lines to
 *   stretch the line width
 * @param {function} options.onFold Called once if the text is folded
 * @param {function} options.onOverflow Called once if any line of text exceeds
 *   lineWidth characters
 */
function foldFlowLines(text, indent, mode, _ref) {
  var indentAtStart = _ref.indentAtStart,
      _ref$lineWidth = _ref.lineWidth,
      lineWidth = _ref$lineWidth === void 0 ? 80 : _ref$lineWidth,
      _ref$minContentWidth = _ref.minContentWidth,
      minContentWidth = _ref$minContentWidth === void 0 ? 20 : _ref$minContentWidth,
      onFold = _ref.onFold,
      onOverflow = _ref.onOverflow;
  if (!lineWidth || lineWidth < 0) return text;
  var endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length);
  if (text.length <= endStep) return text;
  var folds = [];
  var escapedFolds = {};
  var end = lineWidth - (typeof indentAtStart === 'number' ? indentAtStart : indent.length);
  var split = undefined;
  var prev = undefined;
  var overflow = false;
  var i = -1;

  if (mode === FOLD_BLOCK) {
    i = consumeMoreIndentedLines(text, i);
    if (i !== -1) end = i + endStep;
  }

  for (var ch; ch = text[i += 1];) {
    if (mode === FOLD_QUOTED && ch === '\\') {
      switch (text[i + 1]) {
        case 'x':
          i += 3;
          break;

        case 'u':
          i += 5;
          break;

        case 'U':
          i += 9;
          break;

        default:
          i += 1;
      }
    }

    if (ch === '\n') {
      if (mode === FOLD_BLOCK) i = consumeMoreIndentedLines(text, i);
      end = i + endStep;
      split = undefined;
    } else {
      if (ch === ' ' && prev && prev !== ' ' && prev !== '\n' && prev !== '\t') {
        // space surrounded by non-space can be replaced with newline + indent
        var next = text[i + 1];
        if (next && next !== ' ' && next !== '\n' && next !== '\t') split = i;
      }

      if (i >= end) {
        if (split) {
          folds.push(split);
          end = split + endStep;
          split = undefined;
        } else if (mode === FOLD_QUOTED) {
          // white-space collected at end may stretch past lineWidth
          while (prev === ' ' || prev === '\t') {
            prev = ch;
            ch = text[i += 1];
            overflow = true;
          } // i - 2 accounts for not-dropped last char + newline-escaping \

          folds.push(i - 2);
          escapedFolds[i - 2] = true;
          end = i - 2 + endStep;
          split = undefined;
        } else {
          overflow = true;
        }
      }
    }

    prev = ch;
  }

  if (overflow && onOverflow) onOverflow();
  if (folds.length === 0) return text;
  if (onFold) onFold();
  var res = text.slice(0, folds[0]);

  for (var _i = 0; _i < folds.length; ++_i) {
    var fold = folds[_i];

    var _end = folds[_i + 1] || text.length;

    if (mode === FOLD_QUOTED && escapedFolds[fold]) res += "".concat(text[fold], "\\");
    res += "\n".concat(indent).concat(text.slice(fold + 1, _end));
  }

  return res;
}
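/* Illustration only, not part of the bundled module: foldFlowLines() rewraps a
 * long flow scalar by turning fold-eligible spaces into a newline plus the
 * given indent. For instance
 *
 *   foldFlowLines('a very long sentence ...', '  ', 'flow', { lineWidth: 20 })
 *
 * returns 'a very long\n  sentence ...'. In 'quoted' mode a fold that lands in
 * trailing whitespace is emitted as a backslash-escaped newline instead, and
 * onFold / onOverflow (when provided in the options) are each called at most
 * once: onFold if any fold happened, onOverflow if a line still ends up longer
 * than lineWidth.
 */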
|
||
|
|
||
|
var getFoldOptions = function getFoldOptions(_ref) {
|
||
|
var indentAtStart = _ref.indentAtStart;
|
||
|
return indentAtStart ? Object.assign({
|
||
|
indentAtStart: indentAtStart
|
||
|
}, strOptions.fold) : strOptions.fold;
|
||
|
}; // Also checks for lines starting with %, as parsing the output as YAML 1.1 will
|
||
|
// presume that's starting a new document.
|
||
|
|
||
|
|
||
|
var containsDocumentMarker = function containsDocumentMarker(str) {
|
||
|
return /^(%|---|\.\.\.)/m.test(str);
|
||
|
};
|
||
|
|
||
|
function lineLengthOverLimit(str, limit) {
|
||
|
var strLen = str.length;
|
||
|
if (strLen <= limit) return false;
|
||
|
|
||
|
for (var i = 0, start = 0; i < strLen; ++i) {
|
||
|
if (str[i] === '\n') {
|
||
|
if (i - start > limit) return true;
|
||
|
start = i + 1;
|
||
|
if (strLen - start <= limit) return false;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
return true;
|
||
|
}
|
||
|
|
||
|
function doubleQuotedString(value, ctx) {
|
||
|
var implicitKey = ctx.implicitKey;
|
||
|
var _strOptions$doubleQuo = strOptions.doubleQuoted,
|
||
|
jsonEncoding = _strOptions$doubleQuo.jsonEncoding,
|
||
|
minMultiLineLength = _strOptions$doubleQuo.minMultiLineLength;
|
||
|
var json = JSON.stringify(value);
|
||
|
if (jsonEncoding) return json;
|
||
|
var indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');
|
||
|
var str = '';
|
||
|
var start = 0;
|
||
|
|
||
|
for (var i = 0, ch = json[i]; ch; ch = json[++i]) {
|
||
|
if (ch === ' ' && json[i + 1] === '\\' && json[i + 2] === 'n') {
|
||
|
// space before newline needs to be escaped to not be folded
|
||
|
str += json.slice(start, i) + '\\ ';
|
||
|
i += 1;
|
||
|
start = i;
|
||
|
ch = '\\';
|
||
|
}
|
||
|
|
||
|
if (ch === '\\') switch (json[i + 1]) {
|
||
|
case 'u':
|
||
|
{
|
||
|
str += json.slice(start, i);
|
||
|
var code = json.substr(i + 2, 4);
|
||
|
|
||
|
switch (code) {
|
||
|
case '0000':
|
||
|
str += '\\0';
|
||
|
break;
|
||
|
|
||
|
case '0007':
|
||
|
str += '\\a';
|
||
|
break;
|
||
|
|
||
|
case '000b':
|
||
|
str += '\\v';
|
||
|
break;
|
||
|
|
||
|
case '001b':
|
||
|
str += '\\e';
|
||
|
break;
|
||
|
|
||
|
case '0085':
|
||
|
str += '\\N';
|
||
|
break;
|
||
|
|
||
|
case '00a0':
|
||
|
str += '\\_';
|
||
|
break;
|
||
|
|
||
|
case '2028':
|
||
|
str += '\\L';
|
||
|
break;
|
||
|
|
||
|
case '2029':
|
||
|
str += '\\P';
|
||
|
break;
|
||
|
|
||
|
default:
|
||
|
if (code.substr(0, 2) === '00') str += '\\x' + code.substr(2);else str += json.substr(i, 6);
|
||
|
}
|
||
|
|
||
|
i += 5;
|
||
|
start = i + 1;
|
||
|
}
|
||
|
break;
|
||
|
|
||
|
case 'n':
|
||
|
if (implicitKey || json[i + 2] === '"' || json.length < minMultiLineLength) {
|
||
|
i += 1;
|
||
|
} else {
|
||
|
// folding will eat first newline
|
||
|
str += json.slice(start, i) + '\n\n';
|
||
|
|
||
|
while (json[i + 2] === '\\' && json[i + 3] === 'n' && json[i + 4] !== '"') {
|
||
|
str += '\n';
|
||
|
i += 2;
|
||
|
}
|
||
|
|
||
|
str += indent; // space after newline needs to be escaped to not be folded
|
||
|
|
||
|
if (json[i + 2] === ' ') str += '\\';
|
||
|
i += 1;
|
||
|
start = i + 1;
|
||
|
}
|
||
|
|
||
|
break;
|
||
|
|
||
|
default:
|
||
|
i += 1;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
str = start ? str + json.slice(start) : json;
|
||
|
return implicitKey ? str : foldFlowLines(str, indent, FOLD_QUOTED, getFoldOptions(ctx));
|
||
|
}
|
||
|
|
||
|
function singleQuotedString(value, ctx) {
|
||
|
if (ctx.implicitKey) {
|
||
|
if (/\n/.test(value)) return doubleQuotedString(value, ctx);
|
||
|
} else {
|
||
|
// single quoted string can't have leading or trailing whitespace around newline
|
||
|
if (/[ \t]\n|\n[ \t]/.test(value)) return doubleQuotedString(value, ctx);
|
||
|
}
|
||
|
|
||
|
var indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : '');
|
||
|
var res = "'" + value.replace(/'/g, "''").replace(/\n+/g, "$&\n".concat(indent)) + "'";
|
||
|
return ctx.implicitKey ? res : foldFlowLines(res, indent, FOLD_FLOW, getFoldOptions(ctx));
|
||
|
}
|
||
|
|
||
|
function blockString(_ref2, ctx, onComment, onChompKeep) {
|
||
|
var comment = _ref2.comment,
|
||
|
type = _ref2.type,
|
||
|
value = _ref2.value;
|
||
|
|
||
|
// 1. Block can't end in whitespace unless the last line is non-empty.
|
||
|
// 2. Strings consisting of only whitespace are best rendered explicitly.
|
||
|
if (/\n[\t ]+$/.test(value) || /^\s*$/.test(value)) {
|
||
|
return doubleQuotedString(value, ctx);
|
||
|
}
|
||
|
|
||
|
var indent = ctx.indent || (ctx.forceBlockIndent || containsDocumentMarker(value) ? ' ' : '');
|
||
|
var indentSize = indent ? '2' : '1'; // root is at -1
|
||
|
|
||
|
var literal = type === Type.BLOCK_FOLDED ? false : type === Type.BLOCK_LITERAL ? true : !lineLengthOverLimit(value, strOptions.fold.lineWidth - indent.length);
|
||
|
var header = literal ? '|' : '>';
|
||
|
if (!value) return header + '\n';
|
||
|
var wsStart = '';
|
||
|
var wsEnd = '';
|
||
|
value = value.replace(/[\n\t ]*$/, function (ws) {
|
||
|
var n = ws.indexOf('\n');
|
||
|
|
||
|
if (n === -1) {
|
||
|
header += '-'; // strip
|
||
|
} else if (value === ws || n !== ws.length - 1) {
|
||
|
header += '+'; // keep
|
||
|
|
||
|
if (onChompKeep) onChompKeep();
|
||
|
}
|
||
|
|
||
|
wsEnd = ws.replace(/\n$/, '');
|
||
|
return '';
|
||
|
}).replace(/^[\n ]*/, function (ws) {
|
||
|
if (ws.indexOf(' ') !== -1) header += indentSize;
|
||
|
var m = ws.match(/ +$/);
|
||
|
|
||
|
if (m) {
|
||
|
wsStart = ws.slice(0, -m[0].length);
|
||
|
return m[0];
|
||
|
} else {
|
||
|
wsStart = ws;
|
||
|
return '';
|
||
|
}
|
||
|
});
|
||
|
if (wsEnd) wsEnd = wsEnd.replace(/\n+(?!\n|$)/g, "$&".concat(indent));
|
||
|
if (wsStart) wsStart = wsStart.replace(/\n+/g, "$&".concat(indent));
|
||
|
|
||
|
if (comment) {
|
||
|
header += ' #' + comment.replace(/ ?[\r\n]+/g, ' ');
|
||
|
if (onComment) onComment();
|
||
|
}
|
||
|
|
||
|
if (!value) return "".concat(header).concat(indentSize, "\n").concat(indent).concat(wsEnd);
|
||
|
|
||
|
if (literal) {
|
||
|
value = value.replace(/\n+/g, "$&".concat(indent));
|
||
|
return "".concat(header, "\n").concat(indent).concat(wsStart).concat(value).concat(wsEnd);
|
||
|
}
|
||
|
|
||
|
value = value.replace(/\n+/g, '\n$&').replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded
|
||
|
// ^ ind.line ^ empty ^ capture next empty lines only at end of indent
|
||
|
.replace(/\n+/g, "$&".concat(indent));
|
||
|
var body = foldFlowLines("".concat(wsStart).concat(value).concat(wsEnd), indent, FOLD_BLOCK, strOptions.fold);
|
||
|
return "".concat(header, "\n").concat(indent).concat(body);
|
||
|
}
|
||
|
|
||
|
function plainString(item, ctx, onComment, onChompKeep) {
|
||
|
var comment = item.comment,
|
||
|
type = item.type,
|
||
|
value = item.value;
|
||
|
var actualString = ctx.actualString,
|
||
|
implicitKey = ctx.implicitKey,
|
||
|
indent = ctx.indent,
|
||
|
inFlow = ctx.inFlow;
|
||
|
|
||
|
if (implicitKey && /[\n[\]{},]/.test(value) || inFlow && /[[\]{},]/.test(value)) {
|
||
|
return doubleQuotedString(value, ctx);
|
||
|
}
|
||
|
|
||
|
if (!value || /^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) {
|
||
|
// not allowed:
|
||
|
// - empty string, '-' or '?'
|
||
|
// - start with an indicator character (except [?:-]) or /[?-] /
|
||
|
// - '\n ', ': ' or ' \n' anywhere
|
||
|
// - '#' not preceded by a non-space char
|
||
|
// - end with ' ' or ':'
|
||
|
return implicitKey || inFlow || value.indexOf('\n') === -1 ? value.indexOf('"') !== -1 && value.indexOf("'") === -1 ? singleQuotedString(value, ctx) : doubleQuotedString(value, ctx) : blockString(item, ctx, onComment, onChompKeep);
|
||
|
}
|
||
|
|
||
|
if (!implicitKey && !inFlow && type !== Type.PLAIN && value.indexOf('\n') !== -1) {
|
||
|
// Where allowed & type not set explicitly, prefer block style for multiline strings
|
||
|
return blockString(item, ctx, onComment, onChompKeep);
|
||
|
}
|
||
|
|
||
|
if (indent === '' && containsDocumentMarker(value)) {
|
||
|
ctx.forceBlockIndent = true;
|
||
|
return blockString(item, ctx, onComment, onChompKeep);
|
||
|
}
|
||
|
|
||
|
var str = value.replace(/\n+/g, "$&\n".concat(indent)); // Verify that output will be parsed as a string, as e.g. plain numbers and
|
||
|
// booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'),
|
||
|
// and others in v1.1.
|
||
|
|
||
|
if (actualString) {
|
||
|
var tags = ctx.doc.schema.tags;
|
||
|
var resolved = resolveScalar(str, tags, tags.scalarFallback).value;
|
||
|
if (typeof resolved !== 'string') return doubleQuotedString(value, ctx);
|
||
|
}
|
||
|
|
||
|
var body = implicitKey ? str : foldFlowLines(str, indent, FOLD_FLOW, getFoldOptions(ctx));
|
||
|
|
||
|
if (comment && !inFlow && (body.indexOf('\n') !== -1 || comment.indexOf('\n') !== -1)) {
|
||
|
if (onComment) onComment();
|
||
|
return addCommentBefore(body, indent, comment);
|
||
|
}
|
||
|
|
||
|
return body;
|
||
|
}
|
||
|
|
||
|
function stringifyString(item, ctx, onComment, onChompKeep) {
|
||
|
var defaultType = strOptions.defaultType;
|
||
|
var implicitKey = ctx.implicitKey,
|
||
|
inFlow = ctx.inFlow;
|
||
|
var _item = item,
|
||
|
type = _item.type,
|
||
|
value = _item.value;
|
||
|
|
||
|
if (typeof value !== 'string') {
|
||
|
value = String(value);
|
||
|
item = Object.assign({}, item, {
|
||
|
value: value
|
||
|
});
|
||
|
}
|
||
|
|
||
|
var _stringify = function _stringify(_type) {
|
||
|
switch (_type) {
|
||
|
case Type.BLOCK_FOLDED:
|
||
|
case Type.BLOCK_LITERAL:
|
||
|
return blockString(item, ctx, onComment, onChompKeep);
|
||
|
|
||
|
case Type.QUOTE_DOUBLE:
|
||
|
return doubleQuotedString(value, ctx);
|
||
|
|
||
|
case Type.QUOTE_SINGLE:
|
||
|
return singleQuotedString(value, ctx);
|
||
|
|
||
|
case Type.PLAIN:
|
||
|
return plainString(item, ctx, onComment, onChompKeep);
|
||
|
|
||
|
default:
|
||
|
return null;
|
||
|
}
|
||
|
};
|
||
|
|
||
|
if (type !== Type.QUOTE_DOUBLE && /[\x00-\x08\x0b-\x1f\x7f-\x9f]/.test(value)) {
|
||
|
// force double quotes on control characters
|
||
|
type = Type.QUOTE_DOUBLE;
|
||
|
} else if ((implicitKey || inFlow) && (type === Type.BLOCK_FOLDED || type === Type.BLOCK_LITERAL)) {
|
||
|
// should not happen; blocks are not valid inside flow containers
|
||
|
type = Type.QUOTE_DOUBLE;
|
||
|
}
|
||
|
|
||
|
var res = _stringify(type);
|
||
|
|
||
|
if (res === null) {
|
||
|
res = _stringify(defaultType);
|
||
|
if (res === null) throw new Error("Unsupported default string type ".concat(defaultType));
|
||
|
}
|
||
|
|
||
|
return res;
|
||
|
}
|
||
|
|
||
|
function stringifyNumber(_ref) {
  var format = _ref.format,
      minFractionDigits = _ref.minFractionDigits,
      tag = _ref.tag,
      value = _ref.value;
  if (typeof value === 'bigint') return String(value);
  if (!isFinite(value)) return isNaN(value) ? '.nan' : value < 0 ? '-.inf' : '.inf';
  var n = JSON.stringify(value);

  if (!format && minFractionDigits && (!tag || tag === 'tag:yaml.org,2002:float') && /^\d/.test(n)) {
    var i = n.indexOf('.');

    if (i < 0) {
      i = n.length;
      n += '.';
    }

    var d = minFractionDigits - (n.length - i - 1);

    while (d-- > 0) {
      n += '0';
    }
  }

  return n;
}
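/* Illustration only, not part of the bundled module:
 *
 *   stringifyNumber({ value: 3, minFractionDigits: 2 })  // '3.00'
 *   stringifyNumber({ value: NaN })                      // '.nan'
 *   stringifyNumber({ value: -Infinity })                // '-.inf'
 *
 * The fraction-digit padding is only applied when no explicit format is set and
 * the tag is absent or the core float tag, so explicitly tagged integers are
 * not rewritten with a trailing '.00'.
 */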
|
||
|
|
||
|
function checkFlowCollectionEnd(errors, cst) {
|
||
|
var char, name;
|
||
|
|
||
|
switch (cst.type) {
|
||
|
case Type.FLOW_MAP:
|
||
|
char = '}';
|
||
|
name = 'flow map';
|
||
|
break;
|
||
|
|
||
|
case Type.FLOW_SEQ:
|
||
|
char = ']';
|
||
|
name = 'flow sequence';
|
||
|
break;
|
||
|
|
||
|
default:
|
||
|
errors.push(new YAMLSemanticError(cst, 'Not a flow collection!?'));
|
||
|
return;
|
||
|
}
|
||
|
|
||
|
var lastItem;
|
||
|
|
||
|
for (var i = cst.items.length - 1; i >= 0; --i) {
|
||
|
var item = cst.items[i];
|
||
|
|
||
|
if (!item || item.type !== Type.COMMENT) {
|
||
|
lastItem = item;
|
||
|
break;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
if (lastItem && lastItem.char !== char) {
|
||
|
var msg = "Expected ".concat(name, " to end with ").concat(char);
|
||
|
var err;
|
||
|
|
||
|
if (typeof lastItem.offset === 'number') {
|
||
|
err = new YAMLSemanticError(cst, msg);
|
||
|
err.offset = lastItem.offset + 1;
|
||
|
} else {
|
||
|
err = new YAMLSemanticError(lastItem, msg);
|
||
|
if (lastItem.range && lastItem.range.end) err.offset = lastItem.range.end - lastItem.range.start;
|
||
|
}
|
||
|
|
||
|
errors.push(err);
|
||
|
}
|
||
|
}
|
||
|
function checkFlowCommentSpace(errors, comment) {
|
||
|
var prev = comment.context.src[comment.range.start - 1];
|
||
|
|
||
|
if (prev !== '\n' && prev !== '\t' && prev !== ' ') {
|
||
|
var msg = 'Comments must be separated from other tokens by white space characters';
|
||
|
errors.push(new YAMLSemanticError(comment, msg));
|
||
|
}
|
||
|
}
|
||
|
function getLongKeyError(source, key) {
|
||
|
var sk = String(key);
|
||
|
var k = sk.substr(0, 8) + '...' + sk.substr(-8);
|
||
|
return new YAMLSemanticError(source, "The \"".concat(k, "\" key is too long"));
|
||
|
}
|
||
|
function resolveComments(collection, comments) {
|
||
|
var _iterator = _createForOfIteratorHelper(comments),
|
||
|
_step;
|
||
|
|
||
|
try {
|
||
|
for (_iterator.s(); !(_step = _iterator.n()).done;) {
|
||
|
var _step$value = _step.value,
|
||
|
afterKey = _step$value.afterKey,
|
||
|
before = _step$value.before,
|
||
|
comment = _step$value.comment;
|
||
|
var item = collection.items[before];
|
||
|
|
||
|
if (!item) {
|
||
|
if (comment !== undefined) {
|
||
|
if (collection.comment) collection.comment += '\n' + comment;else collection.comment = comment;
|
||
|
}
|
||
|
} else {
|
||
|
if (afterKey && item.value) item = item.value;
|
||
|
|
||
|
if (comment === undefined) {
|
||
|
if (afterKey || !item.commentBefore) item.spaceBefore = true;
|
||
|
} else {
|
||
|
if (item.commentBefore) item.commentBefore += '\n' + comment;else item.commentBefore = comment;
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
} catch (err) {
|
||
|
_iterator.e(err);
|
||
|
} finally {
|
||
|
_iterator.f();
|
||
|
}
|
||
|
}
|
||
|
|
||
|
// on error, will return { str: string, errors: Error[] }
|
||
|
function resolveString(doc, node) {
|
||
|
var res = node.strValue;
|
||
|
if (!res) return '';
|
||
|
if (typeof res === 'string') return res;
|
||
|
res.errors.forEach(function (error) {
|
||
|
if (!error.source) error.source = node;
|
||
|
doc.errors.push(error);
|
||
|
});
|
||
|
return res.str;
|
||
|
}
|
||
|
|
||
|
function resolveTagHandle(doc, node) {
|
||
|
var _node$tag = node.tag,
|
||
|
handle = _node$tag.handle,
|
||
|
suffix = _node$tag.suffix;
|
||
|
var prefix = doc.tagPrefixes.find(function (p) {
|
||
|
return p.handle === handle;
|
||
|
});
|
||
|
|
||
|
if (!prefix) {
|
||
|
var dtp = doc.getDefaults().tagPrefixes;
|
||
|
if (dtp) prefix = dtp.find(function (p) {
|
||
|
return p.handle === handle;
|
||
|
});
|
||
|
if (!prefix) throw new YAMLSemanticError(node, "The ".concat(handle, " tag handle is non-default and was not declared."));
|
||
|
}
|
||
|
|
||
|
if (!suffix) throw new YAMLSemanticError(node, "The ".concat(handle, " tag has no suffix."));
|
||
|
|
||
|
if (handle === '!' && (doc.version || doc.options.version) === '1.0') {
|
||
|
if (suffix[0] === '^') {
|
||
|
doc.warnings.push(new YAMLWarning(node, 'YAML 1.0 ^ tag expansion is not supported'));
|
||
|
return suffix;
|
||
|
}
|
||
|
|
||
|
if (/[:/]/.test(suffix)) {
|
||
|
// word/foo -> tag:word.yaml.org,2002:foo
|
||
|
var vocab = suffix.match(/^([a-z0-9-]+)\/(.*)/i);
|
||
|
return vocab ? "tag:".concat(vocab[1], ".yaml.org,2002:").concat(vocab[2]) : "tag:".concat(suffix);
|
||
|
}
|
||
|
}
|
||
|
|
||
|
return prefix.prefix + decodeURIComponent(suffix);
|
||
|
}
|
||
|
|
||
|
function resolveTagName(doc, node) {
|
||
|
var tag = node.tag,
|
||
|
type = node.type;
|
||
|
var nonSpecific = false;
|
||
|
|
||
|
if (tag) {
|
||
|
var handle = tag.handle,
|
||
|
suffix = tag.suffix,
|
||
|
verbatim = tag.verbatim;
|
||
|
|
||
|
if (verbatim) {
|
||
|
if (verbatim !== '!' && verbatim !== '!!') return verbatim;
|
||
|
var msg = "Verbatim tags aren't resolved, so ".concat(verbatim, " is invalid.");
|
||
|
doc.errors.push(new YAMLSemanticError(node, msg));
|
||
|
} else if (handle === '!' && !suffix) {
|
||
|
nonSpecific = true;
|
||
|
} else {
|
||
|
try {
|
||
|
return resolveTagHandle(doc, node);
|
||
|
} catch (error) {
|
||
|
doc.errors.push(error);
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
|
||
|
switch (type) {
|
||
|
case Type.BLOCK_FOLDED:
|
||
|
case Type.BLOCK_LITERAL:
|
||
|
case Type.QUOTE_DOUBLE:
|
||
|
case Type.QUOTE_SINGLE:
|
||
|
return defaultTags.STR;
|
||
|
|
||
|
case Type.FLOW_MAP:
|
||
|
case Type.MAP:
|
||
|
return defaultTags.MAP;
|
||
|
|
||
|
case Type.FLOW_SEQ:
|
||
|
case Type.SEQ:
|
||
|
return defaultTags.SEQ;
|
||
|
|
||
|
case Type.PLAIN:
|
||
|
return nonSpecific ? defaultTags.STR : null;
|
||
|
|
||
|
default:
|
||
|
return null;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
function resolveByTagName(doc, node, tagName) {
|
||
|
var tags = doc.schema.tags;
|
||
|
var matchWithTest = [];
|
||
|
|
||
|
var _iterator = _createForOfIteratorHelper(tags),
|
||
|
_step;
|
||
|
|
||
|
try {
|
||
|
for (_iterator.s(); !(_step = _iterator.n()).done;) {
|
||
|
var tag = _step.value;
|
||
|
|
||
|
if (tag.tag === tagName) {
|
||
|
if (tag.test) matchWithTest.push(tag);else {
|
||
|
var res = tag.resolve(doc, node);
|
||
|
return res instanceof Collection ? res : new Scalar(res);
|
||
|
}
|
||
|
}
|
||
|
}
|
||
|
} catch (err) {
|
||
|
_iterator.e(err);
|
||
|
} finally {
|
||
|
_iterator.f();
|
||
|
}
|
||
|
|
||
|
var str = resolveString(doc, node);
|
||
|
if (typeof str === 'string' && matchWithTest.length > 0) return resolveScalar(str, matchWithTest, tags.scalarFallback);
|
||
|
return null;
|
||
|
}
|
||
|
|
||
|
function getFallbackTagName(_ref) {
|
||
|
var type = _ref.type;
|
||
|
|
||
|
switch (type) {
|
||
|
case Type.FLOW_MAP:
|
||
|
case Type.MAP:
|
||
|
return defaultTags.MAP;
|
||
|
|
||
|
case Type.FLOW_SEQ:
|
||
|
case Type.SEQ:
|
||
|
return defaultTags.SEQ;
|
||
|
|
||
|
default:
|
||
|
return defaultTags.STR;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
function resolveTag(doc, node, tagName) {
|
||
|
try {
|
||
|
var res = resolveByTagName(doc, node, tagName);
|
||
|
|
||
|
if (res) {
|
||
|
if (tagName && node.tag) res.tag = tagName;
|
||
|
return res;
|
||
|
}
|
||
|
} catch (error) {
|
||
|
/* istanbul ignore if */
|
||
|
if (!error.source) error.source = node;
|
||
|
doc.errors.push(error);
|
||
|
return null;
|
||
|
}
|
||
|
|
||
|
try {
|
||
|
var fallback = getFallbackTagName(node);
|
||
|
if (!fallback) throw new Error("The tag ".concat(tagName, " is unavailable"));
|
||
|
var msg = "The tag ".concat(tagName, " is unavailable, falling back to ").concat(fallback);
|
||
|
doc.warnings.push(new YAMLWarning(node, msg));
|
||
|
|
||
|
var _res = resolveByTagName(doc, node, fallback);
|
||
|
|
||
|
_res.tag = tagName;
|
||
|
return _res;
|
||
|
} catch (error) {
|
||
|
var refError = new YAMLReferenceError(node, error.message);
|
||
|
refError.stack = error.stack;
|
||
|
doc.errors.push(refError);
|
||
|
return null;
|
||
|
}
|
||
|
}
|
||
|
|
||
|
var isCollectionItem = function isCollectionItem(node) {
|
||
|
if (!node) return false;
|
||
|
var type = node.type;
|
||
|
return type === Type.MAP_KEY || type === Type.MAP_VALUE || type === Type.SEQ_ITEM;
|
||
|
};
|
||
|
|
||
|
function resolveNodeProps(errors, node) {
  var comments = {
    before: [],
    after: []
  };
  var hasAnchor = false;
  var hasTag = false;
  var props = isCollectionItem(node.context.parent) ? node.context.parent.props.concat(node.props) : node.props;

  var _iterator = _createForOfIteratorHelper(props),
      _step;

  try {
    for (_iterator.s(); !(_step = _iterator.n()).done;) {
      var _step$value = _step.value,
          start = _step$value.start,
          end = _step$value.end;

      switch (node.context.src[start]) {
        case Char.COMMENT:
          {
            if (!node.commentHasRequiredWhitespace(start)) {
              var msg = 'Comments must be separated from other tokens by white space characters';
              errors.push(new YAMLSemanticError(node, msg));
            }

            var header = node.header,
                valueRange = node.valueRange;
            var cc = valueRange && (start > valueRange.start || header && start > header.start) ? comments.after : comments.before;
            cc.push(node.context.src.slice(start + 1, end));
            break;
          }
        // Actual anchor & tag resolution is handled by schema, here we just complain

        case Char.ANCHOR:
          if (hasAnchor) {
            var _msg = 'A node can have at most one anchor';
            errors.push(new YAMLSemanticError(node, _msg));
          }

          hasAnchor = true;
          break;

        case Char.TAG:
          if (hasTag) {
            var _msg2 = 'A node can have at most one tag';
            errors.push(new YAMLSemanticError(node, _msg2));
          }

          hasTag = true;
          break;
      }
    }
  } catch (err) {
    _iterator.e(err);
  } finally {
    _iterator.f();
  }

  return {
    comments: comments,
    hasAnchor: hasAnchor,
    hasTag: hasTag
  };
}

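// Resolves the value of a CST node: aliases become lazily-resolved Alias
// instances (to allow circular references), tagged nodes are resolved by tag,
// and plain scalars fall back to the schema's scalar resolution.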
function resolveNodeValue(doc, node) {
  var anchors = doc.anchors,
      errors = doc.errors,
      schema = doc.schema;

  if (node.type === Type.ALIAS) {
    var name = node.rawValue;
    var src = anchors.getNode(name);

    if (!src) {
      var msg = "Aliased anchor not found: ".concat(name);
      errors.push(new YAMLReferenceError(node, msg));
      return null;
    } // Lazy resolution for circular references


    var res = new Alias(src);

    anchors._cstAliases.push(res);

    return res;
  }

  var tagName = resolveTagName(doc, node);
  if (tagName) return resolveTag(doc, node, tagName);

  if (node.type !== Type.PLAIN) {
    var _msg3 = "Failed to resolve ".concat(node.type, " node here");

    errors.push(new YAMLSyntaxError(node, _msg3));
    return null;
  }

  try {
    var str = resolveString(doc, node);
    return resolveScalar(str, schema.tags, schema.tags.scalarFallback);
  } catch (error) {
    if (!error.source) error.source = node;
    errors.push(error);
    return null;
  }
} // sets node.resolved on success


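// Entry point for turning a CST node into a document node: resolves props,
// registers anchors, resolves the value, then attaches range and comments.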
function resolveNode(doc, node) {
  if (!node) return null;
  if (node.error) doc.errors.push(node.error);

  var _resolveNodeProps = resolveNodeProps(doc.errors, node),
      comments = _resolveNodeProps.comments,
      hasAnchor = _resolveNodeProps.hasAnchor,
      hasTag = _resolveNodeProps.hasTag;

  if (hasAnchor) {
    var anchors = doc.anchors;
    var name = node.anchor;
    var prev = anchors.getNode(name); // At this point, aliases for any preceding node with the same anchor
    // name have already been resolved, so it may safely be renamed.

    if (prev) anchors.map[anchors.newName(name)] = prev; // During parsing, we need to store the CST node in anchors.map as
    // anchors need to be available during resolution to allow for
    // circular references.

    anchors.map[name] = node;
  }

  if (node.type === Type.ALIAS && (hasAnchor || hasTag)) {
    var msg = 'An alias node must not specify any properties';
    doc.errors.push(new YAMLSemanticError(node, msg));
  }

  var res = resolveNodeValue(doc, node);

  if (res) {
    res.range = [node.range.start, node.range.end];
    if (doc.options.keepCstNodes) res.cstNode = node;
    if (doc.options.keepNodeTypes) res.type = node.type;
    var cb = comments.before.join('\n');

    if (cb) {
      res.commentBefore = res.commentBefore ? "".concat(res.commentBefore, "\n").concat(cb) : cb;
    }

    var ca = comments.after.join('\n');
    if (ca) res.comment = res.comment ? "".concat(res.comment, "\n").concat(ca) : ca;
  }

  return node.resolved = res;
}

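// Builds a YAMLMap from a MAP or FLOW_MAP CST node, expanding merge keys and
// reporting duplicate keys.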
function resolveMap(doc, cst) {
  if (cst.type !== Type.MAP && cst.type !== Type.FLOW_MAP) {
    var msg = "A ".concat(cst.type, " node cannot be resolved as a mapping");
    doc.errors.push(new YAMLSyntaxError(cst, msg));
    return null;
  }

  var _ref = cst.type === Type.FLOW_MAP ? resolveFlowMapItems(doc, cst) : resolveBlockMapItems(doc, cst),
      comments = _ref.comments,
      items = _ref.items;

  var map = new YAMLMap();
  map.items = items;
  resolveComments(map, comments);
  var hasCollectionKey = false;

  for (var i = 0; i < items.length; ++i) {
    var iKey = items[i].key;
    if (iKey instanceof Collection) hasCollectionKey = true;

    if (doc.schema.merge && iKey && iKey.value === MERGE_KEY) {
      items[i] = new Merge(items[i]);
      var sources = items[i].value.items;
      var error = null;
      sources.some(function (node) {
        if (node instanceof Alias) {
          // During parsing, alias sources are CST nodes; to account for
          // circular references their resolved values can't be used here.
          var type = node.source.type;
          if (type === Type.MAP || type === Type.FLOW_MAP) return false;
          return error = 'Merge nodes aliases can only point to maps';
        }

        return error = 'Merge nodes can only have Alias nodes as values';
      });
      if (error) doc.errors.push(new YAMLSemanticError(cst, error));
    } else {
      for (var j = i + 1; j < items.length; ++j) {
        var jKey = items[j].key;

        if (iKey === jKey || iKey && jKey && Object.prototype.hasOwnProperty.call(iKey, 'value') && iKey.value === jKey.value) {
          var _msg = "Map keys must be unique; \"".concat(iKey, "\" is repeated");

          doc.errors.push(new YAMLSemanticError(cst, _msg));
          break;
        }
      }
    }
  }

  if (hasCollectionKey && !doc.options.mapAsMap) {
    var warn = 'Keys with collection values will be stringified as YAML due to JS Object restrictions. Use mapAsMap: true to avoid this.';
    doc.warnings.push(new YAMLWarning(cst, warn));
  }

  cst.resolved = map;
  return map;
}

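// True if the first property of a map value item is a comment placed on the
// line where the item starts, before the value itself.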
var valueHasPairComment = function valueHasPairComment(_ref2) {
  var _ref2$context = _ref2.context,
      lineStart = _ref2$context.lineStart,
      node = _ref2$context.node,
      src = _ref2$context.src,
      props = _ref2.props;
  if (props.length === 0) return false;
  var start = props[0].start;
  if (node && start > node.valueRange.start) return false;
  if (src[start] !== Char.COMMENT) return false;

  for (var i = lineStart; i < start; ++i) {
    if (src[i] === '\n') return false;
  }

  return true;
};

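// Moves a comment that trails the key of a block mapping entry from the value
// node onto the Pair itself.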
function resolvePairComment(item, pair) {
  if (!valueHasPairComment(item)) return;
  var comment = item.getPropValue(0, Char.COMMENT, true);
  var found = false;
  var cb = pair.value.commentBefore;

  if (cb && cb.startsWith(comment)) {
    pair.value.commentBefore = cb.substr(comment.length + 1);
    found = true;
  } else {
    var cc = pair.value.comment;

    if (!item.node && cc && cc.startsWith(comment)) {
      pair.value.comment = cc.substr(comment.length + 1);
      found = true;
    }
  }

  if (found) pair.comment = comment;
}

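// Resolves the items of a block mapping into Pair instances, tracking
// blank-line and comment placement and validating implicit keys.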
function resolveBlockMapItems(doc, cst) {
  var comments = [];
  var items = [];
  var key = undefined;
  var keyStart = null;

  for (var i = 0; i < cst.items.length; ++i) {
    var item = cst.items[i];

    switch (item.type) {
      case Type.BLANK_LINE:
        comments.push({
          afterKey: !!key,
          before: items.length
        });
        break;

      case Type.COMMENT:
        comments.push({
          afterKey: !!key,
          before: items.length,
          comment: item.comment
        });
        break;

      case Type.MAP_KEY:
        if (key !== undefined) items.push(new Pair(key));
        if (item.error) doc.errors.push(item.error);
        key = resolveNode(doc, item.node);
        keyStart = null;
        break;

      case Type.MAP_VALUE:
        {
          if (key === undefined) key = null;
          if (item.error) doc.errors.push(item.error);

          if (!item.context.atLineStart && item.node && item.node.type === Type.MAP && !item.node.context.atLineStart) {
            var msg = 'Nested mappings are not allowed in compact mappings';
            doc.errors.push(new YAMLSemanticError(item.node, msg));
          }

          var valueNode = item.node;

          if (!valueNode && item.props.length > 0) {
            // Comments on an empty mapping value need to be preserved, so we
            // need to construct a minimal empty node here to use instead of the
            // missing `item.node`. -- eemeli/yaml#19
            valueNode = new PlainValue(Type.PLAIN, []);
            valueNode.context = {
              parent: item,
              src: item.context.src
            };
            var pos = item.range.start + 1;
            valueNode.range = {
              start: pos,
              end: pos
            };
            valueNode.valueRange = {
              start: pos,
              end: pos
            };

            if (typeof item.range.origStart === 'number') {
              var origPos = item.range.origStart + 1;
              valueNode.range.origStart = valueNode.range.origEnd = origPos;
              valueNode.valueRange.origStart = valueNode.valueRange.origEnd = origPos;
            }
          }

          var pair = new Pair(key, resolveNode(doc, valueNode));
          resolvePairComment(item, pair);
          items.push(pair);

          if (key && typeof keyStart === 'number') {
            if (item.range.start > keyStart + 1024) doc.errors.push(getLongKeyError(cst, key));
          }

          key = undefined;
          keyStart = null;
        }
        break;

      default:
        if (key !== undefined) items.push(new Pair(key));
        key = resolveNode(doc, item);
        keyStart = item.range.start;
        if (item.error) doc.errors.push(item.error);

        next: for (var j = i + 1;; ++j) {
          var nextItem = cst.items[j];

          switch (nextItem && nextItem.type) {
            case Type.BLANK_LINE:
            case Type.COMMENT:
              continue next;

            case Type.MAP_VALUE:
              break next;

            default:
              {
                var _msg2 = 'Implicit map keys need to be followed by map values';
                doc.errors.push(new YAMLSemanticError(item, _msg2));
                break next;
              }
          }
        }

        if (item.valueRangeContainsNewline) {
          var _msg3 = 'Implicit map keys need to be on a single line';
          doc.errors.push(new YAMLSemanticError(item, _msg3));
        }

    }
  }

  if (key !== undefined) items.push(new Pair(key));
  return {
    comments: comments,
    items: items
  };
}

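// Resolves the items of a flow mapping ({ ... }), validating the expected
// order of the '?', ':', ',' and '}' indicator characters.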
function resolveFlowMapItems(doc, cst) {
  var comments = [];
  var items = [];
  var key = undefined;
  var explicitKey = false;
  var next = '{';

  for (var i = 0; i < cst.items.length; ++i) {
    var item = cst.items[i];

    if (typeof item.char === 'string') {
      var char = item.char,
          offset = item.offset;

      if (char === '?' && key === undefined && !explicitKey) {
        explicitKey = true;
        next = ':';
        continue;
      }

      if (char === ':') {
        if (key === undefined) key = null;

        if (next === ':') {
          next = ',';
          continue;
        }
      } else {
        if (explicitKey) {
          if (key === undefined && char !== ',') key = null;
          explicitKey = false;
        }

        if (key !== undefined) {
          items.push(new Pair(key));
          key = undefined;

          if (char === ',') {
            next = ':';
            continue;
          }
        }
      }

      if (char === '}') {
        if (i === cst.items.length - 1) continue;
      } else if (char === next) {
        next = ':';
        continue;
      }

      var msg = "Flow map contains an unexpected ".concat(char);
      var err = new YAMLSyntaxError(cst, msg);
      err.offset = offset;
      doc.errors.push(err);
    } else if (item.type === Type.BLANK_LINE) {
      comments.push({
        afterKey: !!key,
        before: items.length
      });
    } else if (item.type === Type.COMMENT) {
      checkFlowCommentSpace(doc.errors, item);
      comments.push({
        afterKey: !!key,
        before: items.length,
        comment: item.comment
      });
    } else if (key === undefined) {
      if (next === ',') doc.errors.push(new YAMLSemanticError(item, 'Separator , missing in flow map'));
      key = resolveNode(doc, item);
    } else {
      if (next !== ',') doc.errors.push(new YAMLSemanticError(item, 'Indicator : missing in flow map entry'));
      items.push(new Pair(key, resolveNode(doc, item)));
      key = undefined;
      explicitKey = false;
    }
  }

  checkFlowCollectionEnd(doc.errors, cst);
  if (key !== undefined) items.push(new Pair(key));
  return {
    comments: comments,
    items: items
  };
}

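// Builds a YAMLSeq from a SEQ or FLOW_SEQ CST node.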
function resolveSeq(doc, cst) {
  if (cst.type !== Type.SEQ && cst.type !== Type.FLOW_SEQ) {
    var msg = "A ".concat(cst.type, " node cannot be resolved as a sequence");
    doc.errors.push(new YAMLSyntaxError(cst, msg));
    return null;
  }

  var _ref = cst.type === Type.FLOW_SEQ ? resolveFlowSeqItems(doc, cst) : resolveBlockSeqItems(doc, cst),
      comments = _ref.comments,
      items = _ref.items;

  var seq = new YAMLSeq();
  seq.items = items;
  resolveComments(seq, comments);

  if (!doc.options.mapAsMap && items.some(function (it) {
    return it instanceof Pair && it.key instanceof Collection;
  })) {
    var warn = 'Keys with collection values will be stringified as YAML due to JS Object restrictions. Use mapAsMap: true to avoid this.';
    doc.warnings.push(new YAMLWarning(cst, warn));
  }

  cst.resolved = seq;
  return seq;
}

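// Resolves the items of a block sequence, collecting comments and rejecting
// properties placed before the - indicator.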
function resolveBlockSeqItems(doc, cst) {
  var comments = [];
  var items = [];

  for (var i = 0; i < cst.items.length; ++i) {
    var item = cst.items[i];

    switch (item.type) {
      case Type.BLANK_LINE:
        comments.push({
          before: items.length
        });
        break;

      case Type.COMMENT:
        comments.push({
          comment: item.comment,
          before: items.length
        });
        break;

      case Type.SEQ_ITEM:
        if (item.error) doc.errors.push(item.error);
        items.push(resolveNode(doc, item.node));

        if (item.hasProps) {
          var msg = 'Sequence items cannot have tags or anchors before the - indicator';
          doc.errors.push(new YAMLSemanticError(item, msg));
        }

        break;

      default:
        if (item.error) doc.errors.push(item.error);
        doc.errors.push(new YAMLSyntaxError(item, "Unexpected ".concat(item.type, " node in sequence")));
    }
  }

  return {
    comments: comments,
    items: items
  };
}

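// Resolves the items of a flow sequence ([ ... ]), including "key: value"
// pairs that may appear as single items.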
function resolveFlowSeqItems(doc, cst) {
  var comments = [];
  var items = [];
  var explicitKey = false;
  var key = undefined;
  var keyStart = null;
  var next = '[';
  var prevItem = null;

  for (var i = 0; i < cst.items.length; ++i) {
    var item = cst.items[i];

    if (typeof item.char === 'string') {
      var char = item.char,
          offset = item.offset;

      if (char !== ':' && (explicitKey || key !== undefined)) {
        if (explicitKey && key === undefined) key = next ? items.pop() : null;
        items.push(new Pair(key));
        explicitKey = false;
        key = undefined;
        keyStart = null;
      }

      if (char === next) {
        next = null;
      } else if (!next && char === '?') {
        explicitKey = true;
      } else if (next !== '[' && char === ':' && key === undefined) {
        if (next === ',') {
          key = items.pop();

          if (key instanceof Pair) {
            var msg = 'Chaining flow sequence pairs is invalid';
            var err = new YAMLSemanticError(cst, msg);
            err.offset = offset;
            doc.errors.push(err);
          }

          if (!explicitKey && typeof keyStart === 'number') {
            var keyEnd = item.range ? item.range.start : item.offset;
            if (keyEnd > keyStart + 1024) doc.errors.push(getLongKeyError(cst, key));
            var src = prevItem.context.src;

            for (var _i = keyStart; _i < keyEnd; ++_i) {
              if (src[_i] === '\n') {
                var _msg = 'Implicit keys of flow sequence pairs need to be on a single line';
                doc.errors.push(new YAMLSemanticError(prevItem, _msg));
                break;
              }
            }
          }
        } else {
          key = null;
        }

        keyStart = null;
        explicitKey = false;
        next = null;
      } else if (next === '[' || char !== ']' || i < cst.items.length - 1) {
        var _msg2 = "Flow sequence contains an unexpected ".concat(char);

        var _err = new YAMLSyntaxError(cst, _msg2);

        _err.offset = offset;
        doc.errors.push(_err);
      }
    } else if (item.type === Type.BLANK_LINE) {
      comments.push({
        before: items.length
      });
    } else if (item.type === Type.COMMENT) {
      checkFlowCommentSpace(doc.errors, item);
      comments.push({
        comment: item.comment,
        before: items.length
      });
    } else {
      if (next) {
        var _msg3 = "Expected a ".concat(next, " in flow sequence");

        doc.errors.push(new YAMLSemanticError(item, _msg3));
      }

      var value = resolveNode(doc, item);

      if (key === undefined) {
        items.push(value);
        prevItem = item;
      } else {
        items.push(new Pair(key, value));
        key = undefined;
      }

      keyStart = item.range.start;
      next = ',';
    }
  }

  checkFlowCollectionEnd(doc.errors, cst);
  if (key !== undefined) items.push(new Pair(key));
  return {
    comments: comments,
    items: items
  };
}

export { Alias as A, Collection as C, Merge as M, Node as N, Pair as P, Scalar as S, YAMLSeq as Y, boolOptions as a, binaryOptions as b, stringifyString as c, YAMLMap as d, isEmptyPath as e, addComment as f, resolveMap as g, resolveSeq as h, intOptions as i, resolveString as j, stringifyNumber as k, findPair as l, nullOptions as n, resolveNode as r, strOptions as s, toJSON as t };