Mirror of https://github.com/deployphp/action.git (synced 2025-04-20 11:06:47 +00:00)

Commit 554eb0b122 (parent e1f786311a): Add node_modules
994 changed files with 195567 additions and 0 deletions
node_modules/yaml/browser/dist/compose/compose-collection.js (59 lines, generated, vendored, Normal file)
@@ -0,0 +1,59 @@
import { isNode, isMap } from '../nodes/Node.js';
import { Scalar } from '../nodes/Scalar.js';
import { resolveBlockMap } from './resolve-block-map.js';
import { resolveBlockSeq } from './resolve-block-seq.js';
import { resolveFlowCollection } from './resolve-flow-collection.js';

function composeCollection(CN, ctx, token, tagToken, onError) {
    let coll;
    switch (token.type) {
        case 'block-map': {
            coll = resolveBlockMap(CN, ctx, token, onError);
            break;
        }
        case 'block-seq': {
            coll = resolveBlockSeq(CN, ctx, token, onError);
            break;
        }
        case 'flow-collection': {
            coll = resolveFlowCollection(CN, ctx, token, onError);
            break;
        }
    }
    if (!tagToken)
        return coll;
    const tagName = ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg));
    if (!tagName)
        return coll;
    // Cast needed due to: https://github.com/Microsoft/TypeScript/issues/3841
    const Coll = coll.constructor;
    if (tagName === '!' || tagName === Coll.tagName) {
        coll.tag = Coll.tagName;
        return coll;
    }
    const expType = isMap(coll) ? 'map' : 'seq';
    let tag = ctx.schema.tags.find(t => t.collection === expType && t.tag === tagName);
    if (!tag) {
        const kt = ctx.schema.knownTags[tagName];
        if (kt && kt.collection === expType) {
            ctx.schema.tags.push(Object.assign({}, kt, { default: false }));
            tag = kt;
        }
        else {
            onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true);
            coll.tag = tagName;
            return coll;
        }
    }
    const res = tag.resolve(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options);
    const node = isNode(res)
        ? res
        : new Scalar(res);
    node.range = coll.range;
    node.tag = tagName;
    if (tag?.format)
        node.format = tag.format;
    return node;
}

export { composeCollection };
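For reference, a minimal sketch (not part of the vendored file above) of how collection tag resolution surfaces through the public yaml API; the printed results are indicative.

import { parse } from 'yaml'
// '!!map' and '!!seq' match the collections' default tags, so composeCollection
// returns the collection unchanged; an unresolvable tag would be reported as TAG_RESOLVE_FAILED.
console.log(parse('!!map\na: 1\n'))     // { a: 1 }
console.log(parse('!!seq\n- 1\n- 2\n')) // [ 1, 2 ]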
node_modules/yaml/browser/dist/compose/compose-doc.js (40 lines, generated, vendored, Normal file)
@@ -0,0 +1,40 @@
import { Document } from '../doc/Document.js';
import { composeNode, composeEmptyNode } from './compose-node.js';
import { resolveEnd } from './resolve-end.js';
import { resolveProps } from './resolve-props.js';

function composeDoc(options, directives, { offset, start, value, end }, onError) {
    const opts = Object.assign({ _directives: directives }, options);
    const doc = new Document(undefined, opts);
    const ctx = {
        atRoot: true,
        directives: doc.directives,
        options: doc.options,
        schema: doc.schema
    };
    const props = resolveProps(start, {
        indicator: 'doc-start',
        next: value ?? end?.[0],
        offset,
        onError,
        startOnNewline: true
    });
    if (props.found) {
        doc.directives.docStart = true;
        if (value &&
            (value.type === 'block-map' || value.type === 'block-seq') &&
            !props.hasNewline)
            onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker');
    }
    doc.contents = value
        ? composeNode(ctx, value, props, onError)
        : composeEmptyNode(ctx, props.end, start, null, props, onError);
    const contentEnd = doc.contents.range[2];
    const re = resolveEnd(end, contentEnd, false, onError);
    if (re.comment)
        doc.comment = re.comment;
    doc.range = [offset, contentEnd, re.offset];
    return doc;
}

export { composeDoc };
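For reference, a minimal sketch (not part of the diff) of composing a document that carries a %YAML directive and a doc-start marker, via the package's public parseDocument entry point; outputs are indicative.

import { parseDocument } from 'yaml'
const doc = parseDocument('%YAML 1.2\n---\nfoo: bar\n')
console.log(doc.directives.yaml.version) // '1.2'
console.log(doc.toJS())                  // { foo: 'bar' }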
node_modules/yaml/browser/dist/compose/compose-node.js (92 lines, generated, vendored, Normal file)
@@ -0,0 +1,92 @@
import { Alias } from '../nodes/Alias.js';
import { composeCollection } from './compose-collection.js';
import { composeScalar } from './compose-scalar.js';
import { resolveEnd } from './resolve-end.js';
import { emptyScalarPosition } from './util-empty-scalar-position.js';

const CN = { composeNode, composeEmptyNode };
function composeNode(ctx, token, props, onError) {
    const { spaceBefore, comment, anchor, tag } = props;
    let node;
    let isSrcToken = true;
    switch (token.type) {
        case 'alias':
            node = composeAlias(ctx, token, onError);
            if (anchor || tag)
                onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties');
            break;
        case 'scalar':
        case 'single-quoted-scalar':
        case 'double-quoted-scalar':
        case 'block-scalar':
            node = composeScalar(ctx, token, tag, onError);
            if (anchor)
                node.anchor = anchor.source.substring(1);
            break;
        case 'block-map':
        case 'block-seq':
        case 'flow-collection':
            node = composeCollection(CN, ctx, token, tag, onError);
            if (anchor)
                node.anchor = anchor.source.substring(1);
            break;
        default: {
            const message = token.type === 'error'
                ? token.message
                : `Unsupported token (type: ${token.type})`;
            onError(token, 'UNEXPECTED_TOKEN', message);
            node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError);
            isSrcToken = false;
        }
    }
    if (anchor && node.anchor === '')
        onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');
    if (spaceBefore)
        node.spaceBefore = true;
    if (comment) {
        if (token.type === 'scalar' && token.source === '')
            node.comment = comment;
        else
            node.commentBefore = comment;
    }
    // @ts-expect-error Type checking misses meaning of isSrcToken
    if (ctx.options.keepSourceTokens && isSrcToken)
        node.srcToken = token;
    return node;
}
function composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag, end }, onError) {
    const token = {
        type: 'scalar',
        offset: emptyScalarPosition(offset, before, pos),
        indent: -1,
        source: ''
    };
    const node = composeScalar(ctx, token, tag, onError);
    if (anchor) {
        node.anchor = anchor.source.substring(1);
        if (node.anchor === '')
            onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');
    }
    if (spaceBefore)
        node.spaceBefore = true;
    if (comment) {
        node.comment = comment;
        node.range[2] = end;
    }
    return node;
}
function composeAlias({ options }, { offset, source, end }, onError) {
    const alias = new Alias(source.substring(1));
    if (alias.source === '')
        onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string');
    if (alias.source.endsWith(':'))
        onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true);
    const valueEnd = offset + source.length;
    const re = resolveEnd(end, valueEnd, options.strict, onError);
    alias.range = [offset, valueEnd, re.offset];
    if (re.comment)
        alias.comment = re.comment;
    return alias;
}

export { composeEmptyNode, composeNode };
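For reference, a minimal sketch (not part of the diff) of anchor and alias handling as seen through the public API; output is indicative.

import { parse } from 'yaml'
// '&b' is the anchor attached in composeNode; '*b' is resolved by composeAlias
console.log(parse('base: &b { x: 1 }\nref: *b\n')) // { base: { x: 1 }, ref: { x: 1 } }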
node_modules/yaml/browser/dist/compose/compose-scalar.js (80 lines, generated, vendored, Normal file)
@@ -0,0 +1,80 @@
import { SCALAR, isScalar } from '../nodes/Node.js';
import { Scalar } from '../nodes/Scalar.js';
import { resolveBlockScalar } from './resolve-block-scalar.js';
import { resolveFlowScalar } from './resolve-flow-scalar.js';

function composeScalar(ctx, token, tagToken, onError) {
    const { value, type, comment, range } = token.type === 'block-scalar'
        ? resolveBlockScalar(token, ctx.options.strict, onError)
        : resolveFlowScalar(token, ctx.options.strict, onError);
    const tagName = tagToken
        ? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg))
        : null;
    const tag = tagToken && tagName
        ? findScalarTagByName(ctx.schema, value, tagName, tagToken, onError)
        : token.type === 'scalar'
            ? findScalarTagByTest(ctx, value, token, onError)
            : ctx.schema[SCALAR];
    let scalar;
    try {
        const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options);
        scalar = isScalar(res) ? res : new Scalar(res);
    }
    catch (error) {
        const msg = error instanceof Error ? error.message : String(error);
        onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg);
        scalar = new Scalar(value);
    }
    scalar.range = range;
    scalar.source = value;
    if (type)
        scalar.type = type;
    if (tagName)
        scalar.tag = tagName;
    if (tag.format)
        scalar.format = tag.format;
    if (comment)
        scalar.comment = comment;
    return scalar;
}
function findScalarTagByName(schema, value, tagName, tagToken, onError) {
    if (tagName === '!')
        return schema[SCALAR]; // non-specific tag
    const matchWithTest = [];
    for (const tag of schema.tags) {
        if (!tag.collection && tag.tag === tagName) {
            if (tag.default && tag.test)
                matchWithTest.push(tag);
            else
                return tag;
        }
    }
    for (const tag of matchWithTest)
        if (tag.test?.test(value))
            return tag;
    const kt = schema.knownTags[tagName];
    if (kt && !kt.collection) {
        // Ensure that the known tag is available for stringifying,
        // but does not get used by default.
        schema.tags.push(Object.assign({}, kt, { default: false, test: undefined }));
        return kt;
    }
    onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str');
    return schema[SCALAR];
}
function findScalarTagByTest({ directives, schema }, value, token, onError) {
    const tag = schema.tags.find(tag => tag.default && tag.test?.test(value)) || schema[SCALAR];
    if (schema.compat) {
        const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ??
            schema[SCALAR];
        if (tag.tag !== compat.tag) {
            const ts = directives.tagString(tag.tag);
            const cs = directives.tagString(compat.tag);
            const msg = `Value may be parsed as either ${ts} or ${cs}`;
            onError(token, 'TAG_RESOLVE_FAILED', msg, true);
        }
    }
    return tag;
}

export { composeScalar };
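For reference, a minimal sketch (not part of the diff) of scalar tag resolution via the public API; output is indicative.

import { parse } from 'yaml'
// an explicit '!!str' tag goes through findScalarTagByName, while the untagged
// '42' falls back to findScalarTagByTest and resolves as a number
console.log(parse('a: !!str 42\nb: 42\n')) // { a: '42', b: 42 }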
node_modules/yaml/browser/dist/compose/composer.js (217 lines, generated, vendored, Normal file)
@@ -0,0 +1,217 @@
import { Directives } from '../doc/directives.js';
import { Document } from '../doc/Document.js';
import { YAMLWarning, YAMLParseError } from '../errors.js';
import { isCollection, isPair } from '../nodes/Node.js';
import { composeDoc } from './compose-doc.js';
import { resolveEnd } from './resolve-end.js';

function getErrorPos(src) {
    if (typeof src === 'number')
        return [src, src + 1];
    if (Array.isArray(src))
        return src.length === 2 ? src : [src[0], src[1]];
    const { offset, source } = src;
    return [offset, offset + (typeof source === 'string' ? source.length : 1)];
}
function parsePrelude(prelude) {
    let comment = '';
    let atComment = false;
    let afterEmptyLine = false;
    for (let i = 0; i < prelude.length; ++i) {
        const source = prelude[i];
        switch (source[0]) {
            case '#':
                comment +=
                    (comment === '' ? '' : afterEmptyLine ? '\n\n' : '\n') +
                        (source.substring(1) || ' ');
                atComment = true;
                afterEmptyLine = false;
                break;
            case '%':
                if (prelude[i + 1]?.[0] !== '#')
                    i += 1;
                atComment = false;
                break;
            default:
                // This may be wrong after doc-end, but in that case it doesn't matter
                if (!atComment)
                    afterEmptyLine = true;
                atComment = false;
        }
    }
    return { comment, afterEmptyLine };
}
/**
 * Compose a stream of CST nodes into a stream of YAML Documents.
 *
 * ```ts
 * import { Composer, Parser } from 'yaml'
 *
 * const src: string = ...
 * const tokens = new Parser().parse(src)
 * const docs = new Composer().compose(tokens)
 * ```
 */
class Composer {
    constructor(options = {}) {
        this.doc = null;
        this.atDirectives = false;
        this.prelude = [];
        this.errors = [];
        this.warnings = [];
        this.onError = (source, code, message, warning) => {
            const pos = getErrorPos(source);
            if (warning)
                this.warnings.push(new YAMLWarning(pos, code, message));
            else
                this.errors.push(new YAMLParseError(pos, code, message));
        };
        // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
        this.directives = new Directives({ version: options.version || '1.2' });
        this.options = options;
    }
    decorate(doc, afterDoc) {
        const { comment, afterEmptyLine } = parsePrelude(this.prelude);
        //console.log({ dc: doc.comment, prelude, comment })
        if (comment) {
            const dc = doc.contents;
            if (afterDoc) {
                doc.comment = doc.comment ? `${doc.comment}\n${comment}` : comment;
            }
            else if (afterEmptyLine || doc.directives.docStart || !dc) {
                doc.commentBefore = comment;
            }
            else if (isCollection(dc) && !dc.flow && dc.items.length > 0) {
                let it = dc.items[0];
                if (isPair(it))
                    it = it.key;
                const cb = it.commentBefore;
                it.commentBefore = cb ? `${comment}\n${cb}` : comment;
            }
            else {
                const cb = dc.commentBefore;
                dc.commentBefore = cb ? `${comment}\n${cb}` : comment;
            }
        }
        if (afterDoc) {
            Array.prototype.push.apply(doc.errors, this.errors);
            Array.prototype.push.apply(doc.warnings, this.warnings);
        }
        else {
            doc.errors = this.errors;
            doc.warnings = this.warnings;
        }
        this.prelude = [];
        this.errors = [];
        this.warnings = [];
    }
    /**
     * Current stream status information.
     *
     * Mostly useful at the end of input for an empty stream.
     */
    streamInfo() {
        return {
            comment: parsePrelude(this.prelude).comment,
            directives: this.directives,
            errors: this.errors,
            warnings: this.warnings
        };
    }
    /**
     * Compose tokens into documents.
     *
     * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.
     * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.
     */
    *compose(tokens, forceDoc = false, endOffset = -1) {
        for (const token of tokens)
            yield* this.next(token);
        yield* this.end(forceDoc, endOffset);
    }
    /** Advance the composer by one CST token. */
    *next(token) {
        switch (token.type) {
            case 'directive':
                this.directives.add(token.source, (offset, message, warning) => {
                    const pos = getErrorPos(token);
                    pos[0] += offset;
                    this.onError(pos, 'BAD_DIRECTIVE', message, warning);
                });
                this.prelude.push(token.source);
                this.atDirectives = true;
                break;
            case 'document': {
                const doc = composeDoc(this.options, this.directives, token, this.onError);
                if (this.atDirectives && !doc.directives.docStart)
                    this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line');
                this.decorate(doc, false);
                if (this.doc)
                    yield this.doc;
                this.doc = doc;
                this.atDirectives = false;
                break;
            }
            case 'byte-order-mark':
            case 'space':
                break;
            case 'comment':
            case 'newline':
                this.prelude.push(token.source);
                break;
            case 'error': {
                const msg = token.source
                    ? `${token.message}: ${JSON.stringify(token.source)}`
                    : token.message;
                const error = new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg);
                if (this.atDirectives || !this.doc)
                    this.errors.push(error);
                else
                    this.doc.errors.push(error);
                break;
            }
            case 'doc-end': {
                if (!this.doc) {
                    const msg = 'Unexpected doc-end without preceding document';
                    this.errors.push(new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg));
                    break;
                }
                this.doc.directives.docEnd = true;
                const end = resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError);
                this.decorate(this.doc, true);
                if (end.comment) {
                    const dc = this.doc.comment;
                    this.doc.comment = dc ? `${dc}\n${end.comment}` : end.comment;
                }
                this.doc.range[2] = end.offset;
                break;
            }
            default:
                this.errors.push(new YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`));
        }
    }
    /**
     * Call at end of input to yield any remaining document.
     *
     * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.
     * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.
     */
    *end(forceDoc = false, endOffset = -1) {
        if (this.doc) {
            this.decorate(this.doc, true);
            yield this.doc;
            this.doc = null;
        }
        else if (forceDoc) {
            const opts = Object.assign({ _directives: this.directives }, this.options);
            const doc = new Document(undefined, opts);
            if (this.atDirectives)
                this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line');
            doc.range = [0, endOffset, endOffset];
            this.decorate(doc, false);
            yield doc;
        }
    }
}

export { Composer };
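For reference, a runnable sketch (not part of the diff) of the Parser-to-Composer pipeline described in the class doc comment above; output is indicative.

import { Parser, Composer } from 'yaml'
const tokens = new Parser().parse('a: 1\n---\nb: 2\n')
const docs = Array.from(new Composer().compose(tokens))
console.log(docs.map(d => d.toJS())) // [ { a: 1 }, { b: 2 } ]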
node_modules/yaml/browser/dist/compose/resolve-block-map.js (110 lines, generated, vendored, Normal file)
@@ -0,0 +1,110 @@
import { Pair } from '../nodes/Pair.js';
import { YAMLMap } from '../nodes/YAMLMap.js';
import { resolveProps } from './resolve-props.js';
import { containsNewline } from './util-contains-newline.js';
import { flowIndentCheck } from './util-flow-indent-check.js';
import { mapIncludes } from './util-map-includes.js';

const startColMsg = 'All mapping items must start at the same column';
function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError) {
    const map = new YAMLMap(ctx.schema);
    if (ctx.atRoot)
        ctx.atRoot = false;
    let offset = bm.offset;
    let commentEnd = null;
    for (const collItem of bm.items) {
        const { start, key, sep, value } = collItem;
        // key properties
        const keyProps = resolveProps(start, {
            indicator: 'explicit-key-ind',
            next: key ?? sep?.[0],
            offset,
            onError,
            startOnNewline: true
        });
        const implicitKey = !keyProps.found;
        if (implicitKey) {
            if (key) {
                if (key.type === 'block-seq')
                    onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key');
                else if ('indent' in key && key.indent !== bm.indent)
                    onError(offset, 'BAD_INDENT', startColMsg);
            }
            if (!keyProps.anchor && !keyProps.tag && !sep) {
                commentEnd = keyProps.end;
                if (keyProps.comment) {
                    if (map.comment)
                        map.comment += '\n' + keyProps.comment;
                    else
                        map.comment = keyProps.comment;
                }
                continue;
            }
            if (keyProps.hasNewlineAfterProp || containsNewline(key)) {
                onError(key ?? start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line');
            }
        }
        else if (keyProps.found?.indent !== bm.indent) {
            onError(offset, 'BAD_INDENT', startColMsg);
        }
        // key value
        const keyStart = keyProps.end;
        const keyNode = key
            ? composeNode(ctx, key, keyProps, onError)
            : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError);
        if (ctx.schema.compat)
            flowIndentCheck(bm.indent, key, onError);
        if (mapIncludes(ctx, map.items, keyNode))
            onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');
        // value properties
        const valueProps = resolveProps(sep ?? [], {
            indicator: 'map-value-ind',
            next: value,
            offset: keyNode.range[2],
            onError,
            startOnNewline: !key || key.type === 'block-scalar'
        });
        offset = valueProps.end;
        if (valueProps.found) {
            if (implicitKey) {
                if (value?.type === 'block-map' && !valueProps.hasNewline)
                    onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings');
                if (ctx.options.strict &&
                    keyProps.start < valueProps.found.offset - 1024)
                    onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key');
            }
            // value value
            const valueNode = value
                ? composeNode(ctx, value, valueProps, onError)
                : composeEmptyNode(ctx, offset, sep, null, valueProps, onError);
            if (ctx.schema.compat)
                flowIndentCheck(bm.indent, value, onError);
            offset = valueNode.range[2];
            const pair = new Pair(keyNode, valueNode);
            if (ctx.options.keepSourceTokens)
                pair.srcToken = collItem;
            map.items.push(pair);
        }
        else {
            // key with no value
            if (implicitKey)
                onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values');
            if (valueProps.comment) {
                if (keyNode.comment)
                    keyNode.comment += '\n' + valueProps.comment;
                else
                    keyNode.comment = valueProps.comment;
            }
            const pair = new Pair(keyNode);
            if (ctx.options.keepSourceTokens)
                pair.srcToken = collItem;
            map.items.push(pair);
        }
    }
    if (commentEnd && commentEnd < offset)
        onError(commentEnd, 'IMPOSSIBLE', 'Map comment with trailing content');
    map.range = [bm.offset, offset, commentEnd ?? offset];
    return map;
}

export { resolveBlockMap };
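For reference, a minimal sketch (not part of the diff) of block-map resolution through the public API; output is indicative.

import { parse } from 'yaml'
// explicit '? key' / ': value' entries and plain 'key: value' entries are
// both handled by resolveBlockMap
console.log(parse('? a\n: 1\nb: 2\n')) // { a: 1, b: 2 }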
node_modules/yaml/browser/dist/compose/resolve-block-scalar.js (194 lines, generated, vendored, Normal file)
@@ -0,0 +1,194 @@
import { Scalar } from '../nodes/Scalar.js';

function resolveBlockScalar(scalar, strict, onError) {
    const start = scalar.offset;
    const header = parseBlockScalarHeader(scalar, strict, onError);
    if (!header)
        return { value: '', type: null, comment: '', range: [start, start, start] };
    const type = header.mode === '>' ? Scalar.BLOCK_FOLDED : Scalar.BLOCK_LITERAL;
    const lines = scalar.source ? splitLines(scalar.source) : [];
    // determine the end of content & start of chomping
    let chompStart = lines.length;
    for (let i = lines.length - 1; i >= 0; --i) {
        const content = lines[i][1];
        if (content === '' || content === '\r')
            chompStart = i;
        else
            break;
    }
    // shortcut for empty contents
    if (chompStart === 0) {
        const value = header.chomp === '+' && lines.length > 0
            ? '\n'.repeat(Math.max(1, lines.length - 1))
            : '';
        let end = start + header.length;
        if (scalar.source)
            end += scalar.source.length;
        return { value, type, comment: header.comment, range: [start, end, end] };
    }
    // find the indentation level to trim from start
    let trimIndent = scalar.indent + header.indent;
    let offset = scalar.offset + header.length;
    let contentStart = 0;
    for (let i = 0; i < chompStart; ++i) {
        const [indent, content] = lines[i];
        if (content === '' || content === '\r') {
            if (header.indent === 0 && indent.length > trimIndent)
                trimIndent = indent.length;
        }
        else {
            if (indent.length < trimIndent) {
                const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator';
                onError(offset + indent.length, 'MISSING_CHAR', message);
            }
            if (header.indent === 0)
                trimIndent = indent.length;
            contentStart = i;
            break;
        }
        offset += indent.length + content.length + 1;
    }
    // include trailing more-indented empty lines in content
    for (let i = lines.length - 1; i >= chompStart; --i) {
        if (lines[i][0].length > trimIndent)
            chompStart = i + 1;
    }
    let value = '';
    let sep = '';
    let prevMoreIndented = false;
    // leading whitespace is kept intact
    for (let i = 0; i < contentStart; ++i)
        value += lines[i][0].slice(trimIndent) + '\n';
    for (let i = contentStart; i < chompStart; ++i) {
        let [indent, content] = lines[i];
        offset += indent.length + content.length + 1;
        const crlf = content[content.length - 1] === '\r';
        if (crlf)
            content = content.slice(0, -1);
        /* istanbul ignore if already caught in lexer */
        if (content && indent.length < trimIndent) {
            const src = header.indent
                ? 'explicit indentation indicator'
                : 'first line';
            const message = `Block scalar lines must not be less indented than their ${src}`;
            onError(offset - content.length - (crlf ? 2 : 1), 'BAD_INDENT', message);
            indent = '';
        }
        if (type === Scalar.BLOCK_LITERAL) {
            value += sep + indent.slice(trimIndent) + content;
            sep = '\n';
        }
        else if (indent.length > trimIndent || content[0] === '\t') {
            // more-indented content within a folded block
            if (sep === ' ')
                sep = '\n';
            else if (!prevMoreIndented && sep === '\n')
                sep = '\n\n';
            value += sep + indent.slice(trimIndent) + content;
            sep = '\n';
            prevMoreIndented = true;
        }
        else if (content === '') {
            // empty line
            if (sep === '\n')
                value += '\n';
            else
                sep = '\n';
        }
        else {
            value += sep + content;
            sep = ' ';
            prevMoreIndented = false;
        }
    }
    switch (header.chomp) {
        case '-':
            break;
        case '+':
            for (let i = chompStart; i < lines.length; ++i)
                value += '\n' + lines[i][0].slice(trimIndent);
            if (value[value.length - 1] !== '\n')
                value += '\n';
            break;
        default:
            value += '\n';
    }
    const end = start + header.length + scalar.source.length;
    return { value, type, comment: header.comment, range: [start, end, end] };
}
function parseBlockScalarHeader({ offset, props }, strict, onError) {
    /* istanbul ignore if should not happen */
    if (props[0].type !== 'block-scalar-header') {
        onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found');
        return null;
    }
    const { source } = props[0];
    const mode = source[0];
    let indent = 0;
    let chomp = '';
    let error = -1;
    for (let i = 1; i < source.length; ++i) {
        const ch = source[i];
        if (!chomp && (ch === '-' || ch === '+'))
            chomp = ch;
        else {
            const n = Number(ch);
            if (!indent && n)
                indent = n;
            else if (error === -1)
                error = offset + i;
        }
    }
    if (error !== -1)
        onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`);
    let hasSpace = false;
    let comment = '';
    let length = source.length;
    for (let i = 1; i < props.length; ++i) {
        const token = props[i];
        switch (token.type) {
            case 'space':
                hasSpace = true;
            // fallthrough
            case 'newline':
                length += token.source.length;
                break;
            case 'comment':
                if (strict && !hasSpace) {
                    const message = 'Comments must be separated from other tokens by white space characters';
                    onError(token, 'MISSING_CHAR', message);
                }
                length += token.source.length;
                comment = token.source.substring(1);
                break;
            case 'error':
                onError(token, 'UNEXPECTED_TOKEN', token.message);
                length += token.source.length;
                break;
            /* istanbul ignore next should not happen */
            default: {
                const message = `Unexpected token in block scalar header: ${token.type}`;
                onError(token, 'UNEXPECTED_TOKEN', message);
                const ts = token.source;
                if (ts && typeof ts === 'string')
                    length += ts.length;
            }
        }
    }
    return { mode, indent, chomp, comment, length };
}
/** @returns Array of lines split up as `[indent, content]` */
function splitLines(source) {
    const split = source.split(/\n( *)/);
    const first = split[0];
    const m = first.match(/^( *)/);
    const line0 = m?.[1]
        ? [m[1], first.slice(m[1].length)]
        : ['', first];
    const lines = [line0];
    for (let i = 1; i < split.length; i += 2)
        lines.push([split[i], split[i + 1]]);
    return lines;
}

export { resolveBlockScalar };
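For reference, a minimal sketch (not part of the diff) of the chomping indicators this resolver handles, using the public API; outputs are indicative.

import { parse } from 'yaml'
const j = s => JSON.stringify(parse(s).s)
console.log(j('s: |-\n  text\n\n')) // "text"     (strip chomping)
console.log(j('s: |\n  text\n\n'))  // "text\n"   (clip, the default)
console.log(j('s: |+\n  text\n\n')) // "text\n\n" (keep)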
node_modules/yaml/browser/dist/compose/resolve-block-seq.js (45 lines, generated, vendored, Normal file)
@@ -0,0 +1,45 @@
import { YAMLSeq } from '../nodes/YAMLSeq.js';
import { resolveProps } from './resolve-props.js';
import { flowIndentCheck } from './util-flow-indent-check.js';

function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError) {
    const seq = new YAMLSeq(ctx.schema);
    if (ctx.atRoot)
        ctx.atRoot = false;
    let offset = bs.offset;
    let commentEnd = null;
    for (const { start, value } of bs.items) {
        const props = resolveProps(start, {
            indicator: 'seq-item-ind',
            next: value,
            offset,
            onError,
            startOnNewline: true
        });
        if (!props.found) {
            if (props.anchor || props.tag || value) {
                if (value && value.type === 'block-seq')
                    onError(props.end, 'BAD_INDENT', 'All sequence items must start at the same column');
                else
                    onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator');
            }
            else {
                commentEnd = props.end;
                if (props.comment)
                    seq.comment = props.comment;
                continue;
            }
        }
        const node = value
            ? composeNode(ctx, value, props, onError)
            : composeEmptyNode(ctx, props.end, start, null, props, onError);
        if (ctx.schema.compat)
            flowIndentCheck(bs.indent, value, onError);
        offset = node.range[2];
        seq.items.push(node);
    }
    seq.range = [bs.offset, offset, commentEnd ?? offset];
    return seq;
}

export { resolveBlockSeq };
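For reference, a minimal sketch (not part of the diff) of block-sequence resolution through the public API; output is indicative.

import { parse } from 'yaml'
// a '-' with no value yields an empty node, composed via composeEmptyNode
console.log(parse('- 1\n- two\n-\n')) // [ 1, 'two', null ]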
node_modules/yaml/browser/dist/compose/resolve-end.js (37 lines, generated, vendored, Normal file)
@@ -0,0 +1,37 @@
function resolveEnd(end, offset, reqSpace, onError) {
    let comment = '';
    if (end) {
        let hasSpace = false;
        let sep = '';
        for (const token of end) {
            const { source, type } = token;
            switch (type) {
                case 'space':
                    hasSpace = true;
                    break;
                case 'comment': {
                    if (reqSpace && !hasSpace)
                        onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');
                    const cb = source.substring(1) || ' ';
                    if (!comment)
                        comment = cb;
                    else
                        comment += sep + cb;
                    sep = '';
                    break;
                }
                case 'newline':
                    if (comment)
                        sep += source;
                    hasSpace = true;
                    break;
                default:
                    onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`);
            }
            offset += source.length;
        }
    }
    return { comment, offset };
}

export { resolveEnd };
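For reference, a sketch (not part of the diff) of how a trailing same-line comment collected by resolveEnd surfaces through the public API; where exactly the comment attaches depends on the input shape, so treat the printed value as indicative.

import { parseDocument } from 'yaml'
const doc = parseDocument('value # note\n')
// the comment is kept with the leading '#' stripped rather than discarded;
// for this input it ends up on the root scalar node
console.log(doc.contents.comment) // ' note'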
node_modules/yaml/browser/dist/compose/resolve-flow-collection.js (200 lines, generated, vendored, Normal file)
@@ -0,0 +1,200 @@
import { isPair } from '../nodes/Node.js';
import { Pair } from '../nodes/Pair.js';
import { YAMLMap } from '../nodes/YAMLMap.js';
import { YAMLSeq } from '../nodes/YAMLSeq.js';
import { resolveEnd } from './resolve-end.js';
import { resolveProps } from './resolve-props.js';
import { containsNewline } from './util-contains-newline.js';
import { mapIncludes } from './util-map-includes.js';

const blockMsg = 'Block collections are not allowed within flow collections';
const isBlock = (token) => token && (token.type === 'block-map' || token.type === 'block-seq');
function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError) {
    const isMap = fc.start.source === '{';
    const fcName = isMap ? 'flow map' : 'flow sequence';
    const coll = isMap
        ? new YAMLMap(ctx.schema)
        : new YAMLSeq(ctx.schema);
    coll.flow = true;
    const atRoot = ctx.atRoot;
    if (atRoot)
        ctx.atRoot = false;
    let offset = fc.offset + fc.start.source.length;
    for (let i = 0; i < fc.items.length; ++i) {
        const collItem = fc.items[i];
        const { start, key, sep, value } = collItem;
        const props = resolveProps(start, {
            flow: fcName,
            indicator: 'explicit-key-ind',
            next: key ?? sep?.[0],
            offset,
            onError,
            startOnNewline: false
        });
        if (!props.found) {
            if (!props.anchor && !props.tag && !sep && !value) {
                if (i === 0 && props.comma)
                    onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);
                else if (i < fc.items.length - 1)
                    onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`);
                if (props.comment) {
                    if (coll.comment)
                        coll.comment += '\n' + props.comment;
                    else
                        coll.comment = props.comment;
                }
                offset = props.end;
                continue;
            }
            if (!isMap && ctx.options.strict && containsNewline(key))
                onError(key, // checked by containsNewline()
                'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');
        }
        if (i === 0) {
            if (props.comma)
                onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);
        }
        else {
            if (!props.comma)
                onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`);
            if (props.comment) {
                let prevItemComment = '';
                loop: for (const st of start) {
                    switch (st.type) {
                        case 'comma':
                        case 'space':
                            break;
                        case 'comment':
                            prevItemComment = st.source.substring(1);
                            break loop;
                        default:
                            break loop;
                    }
                }
                if (prevItemComment) {
                    let prev = coll.items[coll.items.length - 1];
                    if (isPair(prev))
                        prev = prev.value ?? prev.key;
                    if (prev.comment)
                        prev.comment += '\n' + prevItemComment;
                    else
                        prev.comment = prevItemComment;
                    props.comment = props.comment.substring(prevItemComment.length + 1);
                }
            }
        }
        if (!isMap && !sep && !props.found) {
            // item is a value in a seq
            // → key & sep are empty, start does not include ? or :
            const valueNode = value
                ? composeNode(ctx, value, props, onError)
                : composeEmptyNode(ctx, props.end, sep, null, props, onError);
            coll.items.push(valueNode);
            offset = valueNode.range[2];
            if (isBlock(value))
                onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);
        }
        else {
            // item is a key+value pair
            // key value
            const keyStart = props.end;
            const keyNode = key
                ? composeNode(ctx, key, props, onError)
                : composeEmptyNode(ctx, keyStart, start, null, props, onError);
            if (isBlock(key))
                onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg);
            // value properties
            const valueProps = resolveProps(sep ?? [], {
                flow: fcName,
                indicator: 'map-value-ind',
                next: value,
                offset: keyNode.range[2],
                onError,
                startOnNewline: false
            });
            if (valueProps.found) {
                if (!isMap && !props.found && ctx.options.strict) {
                    if (sep)
                        for (const st of sep) {
                            if (st === valueProps.found)
                                break;
                            if (st.type === 'newline') {
                                onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');
                                break;
                            }
                        }
                    if (props.start < valueProps.found.offset - 1024)
                        onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key');
                }
            }
            else if (value) {
                if ('source' in value && value.source && value.source[0] === ':')
                    onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`);
                else
                    onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`);
            }
            // value value
            const valueNode = value
                ? composeNode(ctx, value, valueProps, onError)
                : valueProps.found
                    ? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError)
                    : null;
            if (valueNode) {
                if (isBlock(value))
                    onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);
            }
            else if (valueProps.comment) {
                if (keyNode.comment)
                    keyNode.comment += '\n' + valueProps.comment;
                else
                    keyNode.comment = valueProps.comment;
            }
            const pair = new Pair(keyNode, valueNode);
            if (ctx.options.keepSourceTokens)
                pair.srcToken = collItem;
            if (isMap) {
                const map = coll;
                if (mapIncludes(ctx, map.items, keyNode))
                    onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');
                map.items.push(pair);
            }
            else {
                const map = new YAMLMap(ctx.schema);
                map.flow = true;
                map.items.push(pair);
                coll.items.push(map);
            }
            offset = valueNode ? valueNode.range[2] : valueProps.end;
        }
    }
    const expectedEnd = isMap ? '}' : ']';
    const [ce, ...ee] = fc.end;
    let cePos = offset;
    if (ce && ce.source === expectedEnd)
        cePos = ce.offset + ce.source.length;
    else {
        const name = fcName[0].toUpperCase() + fcName.substring(1);
        const msg = atRoot
            ? `${name} must end with a ${expectedEnd}`
            : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`;
        onError(offset, atRoot ? 'MISSING_CHAR' : 'BAD_INDENT', msg);
        if (ce && ce.source.length !== 1)
            ee.unshift(ce);
    }
    if (ee.length > 0) {
        const end = resolveEnd(ee, cePos, ctx.options.strict, onError);
        if (end.comment) {
            if (coll.comment)
                coll.comment += '\n' + end.comment;
            else
                coll.comment = end.comment;
        }
        coll.range = [fc.offset, cePos, end.offset];
    }
    else {
        coll.range = [fc.offset, cePos, cePos];
    }
    return coll;
}

export { resolveFlowCollection };
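For reference, a minimal sketch (not part of the diff) of flow-collection resolution through the public API; output is indicative.

import { parse } from 'yaml'
console.log(parse('{ a: 1, b: [2, 3] }')) // { a: 1, b: [ 2, 3 ] }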
node_modules/yaml/browser/dist/compose/resolve-flow-scalar.js (223 lines, generated, vendored, Normal file)
@@ -0,0 +1,223 @@
import { Scalar } from '../nodes/Scalar.js';
import { resolveEnd } from './resolve-end.js';

function resolveFlowScalar(scalar, strict, onError) {
    const { offset, type, source, end } = scalar;
    let _type;
    let value;
    const _onError = (rel, code, msg) => onError(offset + rel, code, msg);
    switch (type) {
        case 'scalar':
            _type = Scalar.PLAIN;
            value = plainValue(source, _onError);
            break;
        case 'single-quoted-scalar':
            _type = Scalar.QUOTE_SINGLE;
            value = singleQuotedValue(source, _onError);
            break;
        case 'double-quoted-scalar':
            _type = Scalar.QUOTE_DOUBLE;
            value = doubleQuotedValue(source, _onError);
            break;
        /* istanbul ignore next should not happen */
        default:
            onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`);
            return {
                value: '',
                type: null,
                comment: '',
                range: [offset, offset + source.length, offset + source.length]
            };
    }
    const valueEnd = offset + source.length;
    const re = resolveEnd(end, valueEnd, strict, onError);
    return {
        value,
        type: _type,
        comment: re.comment,
        range: [offset, valueEnd, re.offset]
    };
}
function plainValue(source, onError) {
    let badChar = '';
    switch (source[0]) {
        /* istanbul ignore next should not happen */
        case '\t':
            badChar = 'a tab character';
            break;
        case ',':
            badChar = 'flow indicator character ,';
            break;
        case '%':
            badChar = 'directive indicator character %';
            break;
        case '|':
        case '>': {
            badChar = `block scalar indicator ${source[0]}`;
            break;
        }
        case '@':
        case '`': {
            badChar = `reserved character ${source[0]}`;
            break;
        }
    }
    if (badChar)
        onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`);
    return foldLines(source);
}
function singleQuotedValue(source, onError) {
    if (source[source.length - 1] !== "'" || source.length === 1)
        onError(source.length, 'MISSING_CHAR', "Missing closing 'quote");
    return foldLines(source.slice(1, -1)).replace(/''/g, "'");
}
function foldLines(source) {
    /**
     * The negative lookbehind here and in the `re` RegExp is to
     * prevent causing a polynomial search time in certain cases.
     *
     * The try-catch is for Safari, which doesn't support this yet:
     * https://caniuse.com/js-regexp-lookbehind
     */
    let first, line;
    try {
        first = new RegExp('(.*?)(?<![ \t])[ \t]*\r?\n', 'sy');
        line = new RegExp('[ \t]*(.*?)(?:(?<![ \t])[ \t]*)?\r?\n', 'sy');
    }
    catch (_) {
        first = /(.*?)[ \t]*\r?\n/sy;
        line = /[ \t]*(.*?)[ \t]*\r?\n/sy;
    }
    let match = first.exec(source);
    if (!match)
        return source;
    let res = match[1];
    let sep = ' ';
    let pos = first.lastIndex;
    line.lastIndex = pos;
    while ((match = line.exec(source))) {
        if (match[1] === '') {
            if (sep === '\n')
                res += sep;
            else
                sep = '\n';
        }
        else {
            res += sep + match[1];
            sep = ' ';
        }
        pos = line.lastIndex;
    }
    const last = /[ \t]*(.*)/sy;
    last.lastIndex = pos;
    match = last.exec(source);
    return res + sep + (match?.[1] ?? '');
}
function doubleQuotedValue(source, onError) {
    let res = '';
    for (let i = 1; i < source.length - 1; ++i) {
        const ch = source[i];
        if (ch === '\r' && source[i + 1] === '\n')
            continue;
        if (ch === '\n') {
            const { fold, offset } = foldNewline(source, i);
            res += fold;
            i = offset;
        }
        else if (ch === '\\') {
            let next = source[++i];
            const cc = escapeCodes[next];
            if (cc)
                res += cc;
            else if (next === '\n') {
                // skip escaped newlines, but still trim the following line
                next = source[i + 1];
                while (next === ' ' || next === '\t')
                    next = source[++i + 1];
            }
            else if (next === '\r' && source[i + 1] === '\n') {
                // skip escaped CRLF newlines, but still trim the following line
                next = source[++i + 1];
                while (next === ' ' || next === '\t')
                    next = source[++i + 1];
            }
            else if (next === 'x' || next === 'u' || next === 'U') {
                const length = { x: 2, u: 4, U: 8 }[next];
                res += parseCharCode(source, i + 1, length, onError);
                i += length;
            }
            else {
                const raw = source.substr(i - 1, 2);
                onError(i - 1, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);
                res += raw;
            }
        }
        else if (ch === ' ' || ch === '\t') {
            // trim trailing whitespace
            const wsStart = i;
            let next = source[i + 1];
            while (next === ' ' || next === '\t')
                next = source[++i + 1];
            if (next !== '\n' && !(next === '\r' && source[i + 2] === '\n'))
                res += i > wsStart ? source.slice(wsStart, i + 1) : ch;
        }
        else {
            res += ch;
        }
    }
    if (source[source.length - 1] !== '"' || source.length === 1)
        onError(source.length, 'MISSING_CHAR', 'Missing closing "quote');
    return res;
}
/**
 * Fold a single newline into a space, multiple newlines to N - 1 newlines.
 * Presumes `source[offset] === '\n'`
 */
function foldNewline(source, offset) {
    let fold = '';
    let ch = source[offset + 1];
    while (ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r') {
        if (ch === '\r' && source[offset + 2] !== '\n')
            break;
        if (ch === '\n')
            fold += '\n';
        offset += 1;
        ch = source[offset + 1];
    }
    if (!fold)
        fold = ' ';
    return { fold, offset };
}
const escapeCodes = {
    '0': '\0',
    a: '\x07',
    b: '\b',
    e: '\x1b',
    f: '\f',
    n: '\n',
    r: '\r',
    t: '\t',
    v: '\v',
    N: '\u0085',
    _: '\u00a0',
    L: '\u2028',
    P: '\u2029',
    ' ': ' ',
    '"': '"',
    '/': '/',
    '\\': '\\',
    '\t': '\t'
};
function parseCharCode(source, offset, length, onError) {
    const cc = source.substr(offset, length);
    const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc);
    const code = ok ? parseInt(cc, 16) : NaN;
    if (isNaN(code)) {
        const raw = source.substr(offset - 2, length + 2);
        onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);
        return raw;
    }
    return String.fromCodePoint(code);
}

export { resolveFlowScalar };
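For reference, a minimal sketch (not part of the diff) of double-quoted escape decoding through the public API; output is indicative.

import { parse } from 'yaml'
// escape sequences in double-quoted scalars are decoded by doubleQuotedValue
console.log(parse('s: "tab:\\tok \\u00e9"').s) // 'tab:\tok é'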
node_modules/yaml/browser/dist/compose/resolve-props.js (134 lines, generated, vendored, Normal file)
@@ -0,0 +1,134 @@
function resolveProps(tokens, { flow, indicator, next, offset, onError, startOnNewline }) {
    let spaceBefore = false;
    let atNewline = startOnNewline;
    let hasSpace = startOnNewline;
    let comment = '';
    let commentSep = '';
    let hasNewline = false;
    let hasNewlineAfterProp = false;
    let reqSpace = false;
    let anchor = null;
    let tag = null;
    let comma = null;
    let found = null;
    let start = null;
    for (const token of tokens) {
        if (reqSpace) {
            if (token.type !== 'space' &&
                token.type !== 'newline' &&
                token.type !== 'comma')
                onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');
            reqSpace = false;
        }
        switch (token.type) {
            case 'space':
                // At the doc level, tabs at line start may be parsed
                // as leading white space rather than indentation.
                // In a flow collection, only the parser handles indent.
                if (!flow &&
                    atNewline &&
                    indicator !== 'doc-start' &&
                    token.source[0] === '\t')
                    onError(token, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');
                hasSpace = true;
                break;
            case 'comment': {
                if (!hasSpace)
                    onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');
                const cb = token.source.substring(1) || ' ';
                if (!comment)
                    comment = cb;
                else
                    comment += commentSep + cb;
                commentSep = '';
                atNewline = false;
                break;
            }
            case 'newline':
                if (atNewline) {
                    if (comment)
                        comment += token.source;
                    else
                        spaceBefore = true;
                }
                else
                    commentSep += token.source;
                atNewline = true;
                hasNewline = true;
                if (anchor || tag)
                    hasNewlineAfterProp = true;
                hasSpace = true;
                break;
            case 'anchor':
                if (anchor)
                    onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor');
                if (token.source.endsWith(':'))
                    onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true);
                anchor = token;
                if (start === null)
                    start = token.offset;
                atNewline = false;
                hasSpace = false;
                reqSpace = true;
                break;
            case 'tag': {
                if (tag)
                    onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag');
                tag = token;
                if (start === null)
                    start = token.offset;
                atNewline = false;
                hasSpace = false;
                reqSpace = true;
                break;
            }
            case indicator:
                // Could here handle preceding comments differently
                if (anchor || tag)
                    onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`);
                if (found)
                    onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`);
                found = token;
                atNewline = false;
                hasSpace = false;
                break;
            case 'comma':
                if (flow) {
                    if (comma)
                        onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`);
                    comma = token;
                    atNewline = false;
                    hasSpace = false;
                    break;
                }
            // else fallthrough
            default:
                onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`);
                atNewline = false;
                hasSpace = false;
        }
    }
    const last = tokens[tokens.length - 1];
    const end = last ? last.offset + last.source.length : offset;
    if (reqSpace &&
        next &&
        next.type !== 'space' &&
        next.type !== 'newline' &&
        next.type !== 'comma' &&
        (next.type !== 'scalar' || next.source !== ''))
        onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');
    return {
        comma,
        found,
        spaceBefore,
        comment,
        hasNewline,
        hasNewlineAfterProp,
        anchor,
        tag,
        end,
        start: start ?? end
    };
}

export { resolveProps };
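For reference, a minimal sketch (not part of the diff) of the anchor and tag properties this helper collects, as they appear on a parsed node via the public API; outputs are indicative.

import { parseDocument } from 'yaml'
const doc = parseDocument('key: &a !!str 1\n')
const node = doc.get('key', true) // keepScalar: return the Scalar node itself
console.log(node.anchor, node.tag, node.value) // 'a' 'tag:yaml.org,2002:str' '1'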
node_modules/yaml/browser/dist/compose/util-contains-newline.js (34 lines, generated, vendored, Normal file)
@@ -0,0 +1,34 @@
function containsNewline(key) {
    if (!key)
        return null;
    switch (key.type) {
        case 'alias':
        case 'scalar':
        case 'double-quoted-scalar':
        case 'single-quoted-scalar':
            if (key.source.includes('\n'))
                return true;
            if (key.end)
                for (const st of key.end)
                    if (st.type === 'newline')
                        return true;
            return false;
        case 'flow-collection':
            for (const it of key.items) {
                for (const st of it.start)
                    if (st.type === 'newline')
                        return true;
                if (it.sep)
                    for (const st of it.sep)
                        if (st.type === 'newline')
                            return true;
                if (containsNewline(it.key) || containsNewline(it.value))
                    return true;
            }
            return false;
        default:
            return true;
    }
}

export { containsNewline };
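For reference, a sketch (not part of the diff) of the error this helper enables the composer to report; the exact error list may vary, so treat the comment as indicative.

import { parseDocument } from 'yaml'
// a flow collection used as an implicit block-map key may not span lines
const doc = parseDocument('[a,\n b]: 1\n')
console.log(doc.errors.map(e => e.code)) // includes 'MULTILINE_IMPLICIT_KEY'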
node_modules/yaml/browser/dist/compose/util-empty-scalar-position.js (27 lines, generated, vendored, Normal file)
@@ -0,0 +1,27 @@
function emptyScalarPosition(offset, before, pos) {
    if (before) {
        if (pos === null)
            pos = before.length;
        for (let i = pos - 1; i >= 0; --i) {
            let st = before[i];
            switch (st.type) {
                case 'space':
                case 'comment':
                case 'newline':
                    offset -= st.source.length;
                    continue;
            }
            // Technically, an empty scalar is immediately after the last non-empty
            // node, but it's more useful to place it after any whitespace.
            st = before[++i];
            while (st?.type === 'space') {
                offset += st.source.length;
                st = before[++i];
            }
            break;
        }
    }
    return offset;
}

export { emptyScalarPosition };
node_modules/yaml/browser/dist/compose/util-flow-indent-check.js (15 lines, generated, vendored, Normal file)
@@ -0,0 +1,15 @@
import { containsNewline } from './util-contains-newline.js';

function flowIndentCheck(indent, fc, onError) {
    if (fc?.type === 'flow-collection') {
        const end = fc.end[0];
        if (end.indent === indent &&
            (end.source === ']' || end.source === '}') &&
            containsNewline(fc)) {
            const msg = 'Flow end indicator should be more indented than parent';
            onError(end, 'BAD_INDENT', msg, true);
        }
    }
}

export { flowIndentCheck };
node_modules/yaml/browser/dist/compose/util-map-includes.js (17 lines, generated, vendored, Normal file)
@@ -0,0 +1,17 @@
import { isScalar } from '../nodes/Node.js';

function mapIncludes(ctx, items, search) {
    const { uniqueKeys } = ctx.options;
    if (uniqueKeys === false)
        return false;
    const isEqual = typeof uniqueKeys === 'function'
        ? uniqueKeys
        : (a, b) => a === b ||
            (isScalar(a) &&
                isScalar(b) &&
                a.value === b.value &&
                !(a.value === '<<' && ctx.schema.merge));
    return items.some(pair => isEqual(pair.key, search));
}

export { mapIncludes };
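For reference, a sketch (not part of the diff) of the duplicate-key check this helper backs, via the public API with the default uniqueKeys: true; outputs are indicative.

import { parseDocument } from 'yaml'
const doc = parseDocument('a: 1\na: 2\n')
console.log(doc.errors.map(e => e.code)) // includes 'DUPLICATE_KEY'
console.log(doc.toJS())                  // { a: 2 } (the later value wins)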
node_modules/yaml/browser/dist/doc/Document.js (332 lines, generated, vendored, Normal file)
@@ -0,0 +1,332 @@
import { Alias } from '../nodes/Alias.js';
import { isEmptyPath, collectionFromPath } from '../nodes/Collection.js';
import { NODE_TYPE, DOC, isNode, isCollection, isScalar } from '../nodes/Node.js';
import { Pair } from '../nodes/Pair.js';
import { toJS } from '../nodes/toJS.js';
import { Schema } from '../schema/Schema.js';
import { stringify } from '../stringify/stringify.js';
import { stringifyDocument } from '../stringify/stringifyDocument.js';
import { anchorNames, findNewAnchor, createNodeAnchors } from './anchors.js';
import { applyReviver } from './applyReviver.js';
import { createNode } from './createNode.js';
import { Directives } from './directives.js';

class Document {
    constructor(value, replacer, options) {
        /** A comment before this Document */
        this.commentBefore = null;
        /** A comment immediately after this Document */
        this.comment = null;
        /** Errors encountered during parsing. */
        this.errors = [];
        /** Warnings encountered during parsing. */
        this.warnings = [];
        Object.defineProperty(this, NODE_TYPE, { value: DOC });
        let _replacer = null;
        if (typeof replacer === 'function' || Array.isArray(replacer)) {
            _replacer = replacer;
        }
        else if (options === undefined && replacer) {
            options = replacer;
            replacer = undefined;
        }
        const opt = Object.assign({
            intAsBigInt: false,
            keepSourceTokens: false,
            logLevel: 'warn',
            prettyErrors: true,
            strict: true,
            uniqueKeys: true,
            version: '1.2'
        }, options);
        this.options = opt;
        let { version } = opt;
        if (options?._directives) {
            this.directives = options._directives.atDocument();
            if (this.directives.yaml.explicit)
                version = this.directives.yaml.version;
        }
        else
            this.directives = new Directives({ version });
        this.setSchema(version, options);
        if (value === undefined)
            this.contents = null;
        else {
            this.contents = this.createNode(value, _replacer, options);
        }
    }
    /**
     * Create a deep copy of this Document and its contents.
     *
     * Custom Node values that inherit from `Object` still refer to their original instances.
     */
    clone() {
        const copy = Object.create(Document.prototype, {
            [NODE_TYPE]: { value: DOC }
        });
        copy.commentBefore = this.commentBefore;
        copy.comment = this.comment;
        copy.errors = this.errors.slice();
        copy.warnings = this.warnings.slice();
        copy.options = Object.assign({}, this.options);
        if (this.directives)
            copy.directives = this.directives.clone();
        copy.schema = this.schema.clone();
        copy.contents = isNode(this.contents)
            ? this.contents.clone(copy.schema)
            : this.contents;
        if (this.range)
            copy.range = this.range.slice();
        return copy;
    }
    /** Adds a value to the document. */
    add(value) {
        if (assertCollection(this.contents))
            this.contents.add(value);
    }
    /** Adds a value to the document. */
    addIn(path, value) {
        if (assertCollection(this.contents))
            this.contents.addIn(path, value);
    }
    /**
     * Create a new `Alias` node, ensuring that the target `node` has the required anchor.
     *
     * If `node` already has an anchor, `name` is ignored.
     * Otherwise, the `node.anchor` value will be set to `name`,
     * or if an anchor with that name is already present in the document,
     * `name` will be used as a prefix for a new unique anchor.
     * If `name` is undefined, the generated anchor will use 'a' as a prefix.
     */
    createAlias(node, name) {
        if (!node.anchor) {
|
||||
const prev = anchorNames(this);
|
||||
node.anchor =
|
||||
// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
|
||||
!name || prev.has(name) ? findNewAnchor(name || 'a', prev) : name;
|
||||
}
|
||||
return new Alias(node.anchor);
|
||||
}
|
||||
createNode(value, replacer, options) {
|
||||
let _replacer = undefined;
|
||||
if (typeof replacer === 'function') {
|
||||
value = replacer.call({ '': value }, '', value);
|
||||
_replacer = replacer;
|
||||
}
|
||||
else if (Array.isArray(replacer)) {
|
||||
const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number;
|
||||
const asStr = replacer.filter(keyToStr).map(String);
|
||||
if (asStr.length > 0)
|
||||
replacer = replacer.concat(asStr);
|
||||
_replacer = replacer;
|
||||
}
|
||||
else if (options === undefined && replacer) {
|
||||
options = replacer;
|
||||
replacer = undefined;
|
||||
}
|
||||
const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {};
|
||||
const { onAnchor, setAnchors, sourceObjects } = createNodeAnchors(this,
|
||||
// eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing
|
||||
anchorPrefix || 'a');
|
||||
const ctx = {
|
||||
aliasDuplicateObjects: aliasDuplicateObjects ?? true,
|
||||
keepUndefined: keepUndefined ?? false,
|
||||
onAnchor,
|
||||
onTagObj,
|
||||
replacer: _replacer,
|
||||
schema: this.schema,
|
||||
sourceObjects
|
||||
};
|
||||
const node = createNode(value, tag, ctx);
|
||||
if (flow && isCollection(node))
|
||||
node.flow = true;
|
||||
setAnchors();
|
||||
return node;
|
||||
}
|
||||
/**
|
||||
* Convert a key and a value into a `Pair` using the current schema,
|
||||
* recursively wrapping all values as `Scalar` or `Collection` nodes.
|
||||
*/
|
||||
createPair(key, value, options = {}) {
|
||||
const k = this.createNode(key, null, options);
|
||||
const v = this.createNode(value, null, options);
|
||||
return new Pair(k, v);
|
||||
}
|
||||
/**
|
||||
* Removes a value from the document.
|
||||
* @returns `true` if the item was found and removed.
|
||||
*/
|
||||
delete(key) {
|
||||
return assertCollection(this.contents) ? this.contents.delete(key) : false;
|
||||
}
|
||||
/**
|
||||
* Removes a value from the document.
|
||||
* @returns `true` if the item was found and removed.
|
||||
*/
|
||||
deleteIn(path) {
|
||||
if (isEmptyPath(path)) {
|
||||
if (this.contents == null)
|
||||
return false;
|
||||
this.contents = null;
|
||||
return true;
|
||||
}
|
||||
return assertCollection(this.contents)
|
||||
? this.contents.deleteIn(path)
|
||||
: false;
|
||||
}
|
||||
/**
|
||||
* Returns item at `key`, or `undefined` if not found. By default unwraps
|
||||
* scalar values from their surrounding node; to disable set `keepScalar` to
|
||||
* `true` (collections are always returned intact).
|
||||
*/
|
||||
get(key, keepScalar) {
|
||||
return isCollection(this.contents)
|
||||
? this.contents.get(key, keepScalar)
|
||||
: undefined;
|
||||
}
|
||||
/**
|
||||
* Returns item at `path`, or `undefined` if not found. By default unwraps
|
||||
* scalar values from their surrounding node; to disable set `keepScalar` to
|
||||
* `true` (collections are always returned intact).
|
||||
*/
|
||||
getIn(path, keepScalar) {
|
||||
if (isEmptyPath(path))
|
||||
return !keepScalar && isScalar(this.contents)
|
||||
? this.contents.value
|
||||
: this.contents;
|
||||
return isCollection(this.contents)
|
||||
? this.contents.getIn(path, keepScalar)
|
||||
: undefined;
|
||||
}
|
||||
/**
|
||||
* Checks if the document includes a value with the key `key`.
|
||||
*/
|
||||
has(key) {
|
||||
return isCollection(this.contents) ? this.contents.has(key) : false;
|
||||
}
|
||||
/**
|
||||
* Checks if the document includes a value at `path`.
|
||||
*/
|
||||
hasIn(path) {
|
||||
if (isEmptyPath(path))
|
||||
return this.contents !== undefined;
|
||||
return isCollection(this.contents) ? this.contents.hasIn(path) : false;
|
||||
}
|
||||
/**
|
||||
* Sets a value in this document. For `!!set`, `value` needs to be a
|
||||
* boolean to add/remove the item from the set.
|
||||
*/
|
||||
set(key, value) {
|
||||
if (this.contents == null) {
|
||||
this.contents = collectionFromPath(this.schema, [key], value);
|
||||
}
|
||||
else if (assertCollection(this.contents)) {
|
||||
this.contents.set(key, value);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Sets a value in this document. For `!!set`, `value` needs to be a
|
||||
* boolean to add/remove the item from the set.
|
||||
*/
|
||||
setIn(path, value) {
|
||||
if (isEmptyPath(path))
|
||||
this.contents = value;
|
||||
else if (this.contents == null) {
|
||||
this.contents = collectionFromPath(this.schema, Array.from(path), value);
|
||||
}
|
||||
else if (assertCollection(this.contents)) {
|
||||
this.contents.setIn(path, value);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Change the YAML version and schema used by the document.
|
||||
* A `null` version disables support for directives, explicit tags, anchors, and aliases.
|
||||
* It also requires the `schema` option to be given as a `Schema` instance value.
|
||||
*
|
||||
* Overrides all previously set schema options.
|
||||
*/
|
||||
setSchema(version, options = {}) {
|
||||
if (typeof version === 'number')
|
||||
version = String(version);
|
||||
let opt;
|
||||
switch (version) {
|
||||
case '1.1':
|
||||
if (this.directives)
|
||||
this.directives.yaml.version = '1.1';
|
||||
else
|
||||
this.directives = new Directives({ version: '1.1' });
|
||||
opt = { merge: true, resolveKnownTags: false, schema: 'yaml-1.1' };
|
||||
break;
|
||||
case '1.2':
|
||||
case 'next':
|
||||
if (this.directives)
|
||||
this.directives.yaml.version = version;
|
||||
else
|
||||
this.directives = new Directives({ version });
|
||||
opt = { merge: false, resolveKnownTags: true, schema: 'core' };
|
||||
break;
|
||||
case null:
|
||||
if (this.directives)
|
||||
delete this.directives;
|
||||
opt = null;
|
||||
break;
|
||||
default: {
|
||||
const sv = JSON.stringify(version);
|
||||
throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`);
|
||||
}
|
||||
}
|
||||
// Not using `instanceof Schema` to allow for duck typing
|
||||
if (options.schema instanceof Object)
|
||||
this.schema = options.schema;
|
||||
else if (opt)
|
||||
this.schema = new Schema(Object.assign(opt, options));
|
||||
else
|
||||
throw new Error(`With a null YAML version, the { schema: Schema } option is required`);
|
||||
}
|
||||
// json & jsonArg are only used from toJSON()
|
||||
toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) {
|
||||
const ctx = {
|
||||
anchors: new Map(),
|
||||
doc: this,
|
||||
keep: !json,
|
||||
mapAsMap: mapAsMap === true,
|
||||
mapKeyWarned: false,
|
||||
maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100,
|
||||
stringify
|
||||
};
|
||||
const res = toJS(this.contents, jsonArg ?? '', ctx);
|
||||
if (typeof onAnchor === 'function')
|
||||
for (const { count, res } of ctx.anchors.values())
|
||||
onAnchor(res, count);
|
||||
return typeof reviver === 'function'
|
||||
? applyReviver(reviver, { '': res }, '', res)
|
||||
: res;
|
||||
}
|
||||
/**
|
||||
* A JSON representation of the document `contents`.
|
||||
*
|
||||
* @param jsonArg Used by `JSON.stringify` to indicate the array index or
|
||||
* property name.
|
||||
*/
|
||||
toJSON(jsonArg, onAnchor) {
|
||||
return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor });
|
||||
}
|
||||
/** A YAML representation of the document. */
|
||||
toString(options = {}) {
|
||||
if (this.errors.length > 0)
|
||||
throw new Error('Document with errors cannot be stringified');
|
||||
if ('indent' in options &&
|
||||
(!Number.isInteger(options.indent) || Number(options.indent) <= 0)) {
|
||||
const s = JSON.stringify(options.indent);
|
||||
throw new Error(`"indent" option must be a positive integer, not ${s}`);
|
||||
}
|
||||
return stringifyDocument(this, options);
|
||||
}
|
||||
}
|
||||
function assertCollection(contents) {
|
||||
if (isCollection(contents))
|
||||
return true;
|
||||
throw new Error('Expected a YAML collection as document contents');
|
||||
}
|
||||
|
||||
export { Document };
|
72
node_modules/yaml/browser/dist/doc/anchors.js
generated
vendored
Normal file
72
node_modules/yaml/browser/dist/doc/anchors.js
generated
vendored
Normal file
|
@ -0,0 +1,72 @@
|
|||
import { isScalar, isCollection } from '../nodes/Node.js';
|
||||
import { visit } from '../visit.js';
|
||||
|
||||
/**
|
||||
* Verify that the input string is a valid anchor.
|
||||
*
|
||||
* Will throw on errors.
|
||||
*/
|
||||
function anchorIsValid(anchor) {
|
||||
if (/[\x00-\x19\s,[\]{}]/.test(anchor)) {
|
||||
const sa = JSON.stringify(anchor);
|
||||
const msg = `Anchor must not contain whitespace or control characters: ${sa}`;
|
||||
throw new Error(msg);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
function anchorNames(root) {
|
||||
const anchors = new Set();
|
||||
visit(root, {
|
||||
Value(_key, node) {
|
||||
if (node.anchor)
|
||||
anchors.add(node.anchor);
|
||||
}
|
||||
});
|
||||
return anchors;
|
||||
}
|
||||
/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */
|
||||
function findNewAnchor(prefix, exclude) {
|
||||
for (let i = 1; true; ++i) {
|
||||
const name = `${prefix}${i}`;
|
||||
if (!exclude.has(name))
|
||||
return name;
|
||||
}
|
||||
}
|
||||
function createNodeAnchors(doc, prefix) {
|
||||
const aliasObjects = [];
|
||||
const sourceObjects = new Map();
|
||||
let prevAnchors = null;
|
||||
return {
|
||||
onAnchor: (source) => {
|
||||
aliasObjects.push(source);
|
||||
if (!prevAnchors)
|
||||
prevAnchors = anchorNames(doc);
|
||||
const anchor = findNewAnchor(prefix, prevAnchors);
|
||||
prevAnchors.add(anchor);
|
||||
return anchor;
|
||||
},
|
||||
/**
|
||||
* With circular references, the source node is only resolved after all
|
||||
* of its child nodes are. This is why anchors are set only after all of
|
||||
* the nodes have been created.
|
||||
*/
|
||||
setAnchors: () => {
|
||||
for (const source of aliasObjects) {
|
||||
const ref = sourceObjects.get(source);
|
||||
if (typeof ref === 'object' &&
|
||||
ref.anchor &&
|
||||
(isScalar(ref.node) || isCollection(ref.node))) {
|
||||
ref.node.anchor = ref.anchor;
|
||||
}
|
||||
else {
|
||||
const error = new Error('Failed to resolve repeated object (this should not happen)');
|
||||
error.source = source;
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
},
|
||||
sourceObjects
|
||||
};
|
||||
}
|
||||
|
||||
export { anchorIsValid, anchorNames, createNodeAnchors, findNewAnchor };
|
54
node_modules/yaml/browser/dist/doc/applyReviver.js
generated
vendored
Normal file
54
node_modules/yaml/browser/dist/doc/applyReviver.js
generated
vendored
Normal file
|
@ -0,0 +1,54 @@
|
|||
/**
|
||||
* Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec,
|
||||
* in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the
|
||||
* 2021 edition: https://tc39.es/ecma262/#sec-json.parse
|
||||
*
|
||||
* Includes extensions for handling Map and Set objects.
|
||||
*/
|
||||
function applyReviver(reviver, obj, key, val) {
|
||||
if (val && typeof val === 'object') {
|
||||
if (Array.isArray(val)) {
|
||||
for (let i = 0, len = val.length; i < len; ++i) {
|
||||
const v0 = val[i];
|
||||
const v1 = applyReviver(reviver, val, String(i), v0);
|
||||
if (v1 === undefined)
|
||||
delete val[i];
|
||||
else if (v1 !== v0)
|
||||
val[i] = v1;
|
||||
}
|
||||
}
|
||||
else if (val instanceof Map) {
|
||||
for (const k of Array.from(val.keys())) {
|
||||
const v0 = val.get(k);
|
||||
const v1 = applyReviver(reviver, val, k, v0);
|
||||
if (v1 === undefined)
|
||||
val.delete(k);
|
||||
else if (v1 !== v0)
|
||||
val.set(k, v1);
|
||||
}
|
||||
}
|
||||
else if (val instanceof Set) {
|
||||
for (const v0 of Array.from(val)) {
|
||||
const v1 = applyReviver(reviver, val, v0, v0);
|
||||
if (v1 === undefined)
|
||||
val.delete(v0);
|
||||
else if (v1 !== v0) {
|
||||
val.delete(v0);
|
||||
val.add(v1);
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
for (const [k, v0] of Object.entries(val)) {
|
||||
const v1 = applyReviver(reviver, val, k, v0);
|
||||
if (v1 === undefined)
|
||||
delete val[k];
|
||||
else if (v1 !== v0)
|
||||
val[k] = v1;
|
||||
}
|
||||
}
|
||||
}
|
||||
return reviver.call(obj, key, val);
|
||||
}
|
||||
|
||||
export { applyReviver };
|
85
node_modules/yaml/browser/dist/doc/createNode.js
generated
vendored
Normal file
85
node_modules/yaml/browser/dist/doc/createNode.js
generated
vendored
Normal file
|
@ -0,0 +1,85 @@
|
|||
import { Alias } from '../nodes/Alias.js';
|
||||
import { isNode, isPair, MAP, SEQ, isDocument } from '../nodes/Node.js';
|
||||
import { Scalar } from '../nodes/Scalar.js';
|
||||
|
||||
const defaultTagPrefix = 'tag:yaml.org,2002:';
|
||||
function findTagObject(value, tagName, tags) {
|
||||
if (tagName) {
|
||||
const match = tags.filter(t => t.tag === tagName);
|
||||
const tagObj = match.find(t => !t.format) ?? match[0];
|
||||
if (!tagObj)
|
||||
throw new Error(`Tag ${tagName} not found`);
|
||||
return tagObj;
|
||||
}
|
||||
return tags.find(t => t.identify?.(value) && !t.format);
|
||||
}
|
||||
function createNode(value, tagName, ctx) {
|
||||
if (isDocument(value))
|
||||
value = value.contents;
|
||||
if (isNode(value))
|
||||
return value;
|
||||
if (isPair(value)) {
|
||||
const map = ctx.schema[MAP].createNode?.(ctx.schema, null, ctx);
|
||||
map.items.push(value);
|
||||
return map;
|
||||
}
|
||||
if (value instanceof String ||
|
||||
value instanceof Number ||
|
||||
value instanceof Boolean ||
|
||||
(typeof BigInt !== 'undefined' && value instanceof BigInt) // not supported everywhere
|
||||
) {
|
||||
// https://tc39.es/ecma262/#sec-serializejsonproperty
|
||||
value = value.valueOf();
|
||||
}
|
||||
const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx;
|
||||
// Detect duplicate references to the same object & use Alias nodes for all
|
||||
// after first. The `ref` wrapper allows for circular references to resolve.
|
||||
let ref = undefined;
|
||||
if (aliasDuplicateObjects && value && typeof value === 'object') {
|
||||
ref = sourceObjects.get(value);
|
||||
if (ref) {
|
||||
if (!ref.anchor)
|
||||
ref.anchor = onAnchor(value);
|
||||
return new Alias(ref.anchor);
|
||||
}
|
||||
else {
|
||||
ref = { anchor: null, node: null };
|
||||
sourceObjects.set(value, ref);
|
||||
}
|
||||
}
|
||||
if (tagName?.startsWith('!!'))
|
||||
tagName = defaultTagPrefix + tagName.slice(2);
|
||||
let tagObj = findTagObject(value, tagName, schema.tags);
|
||||
if (!tagObj) {
|
||||
if (value && typeof value.toJSON === 'function') {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-call
|
||||
value = value.toJSON();
|
||||
}
|
||||
if (!value || typeof value !== 'object') {
|
||||
const node = new Scalar(value);
|
||||
if (ref)
|
||||
ref.node = node;
|
||||
return node;
|
||||
}
|
||||
tagObj =
|
||||
value instanceof Map
|
||||
? schema[MAP]
|
||||
: Symbol.iterator in Object(value)
|
||||
? schema[SEQ]
|
||||
: schema[MAP];
|
||||
}
|
||||
if (onTagObj) {
|
||||
onTagObj(tagObj);
|
||||
delete ctx.onTagObj;
|
||||
}
|
||||
const node = tagObj?.createNode
|
||||
? tagObj.createNode(ctx.schema, value, ctx)
|
||||
: new Scalar(value);
|
||||
if (tagName)
|
||||
node.tag = tagName;
|
||||
if (ref)
|
||||
ref.node = node;
|
||||
return node;
|
||||
}
|
||||
|
||||
export { createNode };
|
169
node_modules/yaml/browser/dist/doc/directives.js
generated
vendored
Normal file
169
node_modules/yaml/browser/dist/doc/directives.js
generated
vendored
Normal file
|
@ -0,0 +1,169 @@
|
|||
import { isNode } from '../nodes/Node.js';
|
||||
import { visit } from '../visit.js';
|
||||
|
||||
const escapeChars = {
|
||||
'!': '%21',
|
||||
',': '%2C',
|
||||
'[': '%5B',
|
||||
']': '%5D',
|
||||
'{': '%7B',
|
||||
'}': '%7D'
|
||||
};
|
||||
const escapeTagName = (tn) => tn.replace(/[!,[\]{}]/g, ch => escapeChars[ch]);
|
||||
class Directives {
|
||||
constructor(yaml, tags) {
|
||||
/**
|
||||
* The directives-end/doc-start marker `---`. If `null`, a marker may still be
|
||||
* included in the document's stringified representation.
|
||||
*/
|
||||
this.docStart = null;
|
||||
/** The doc-end marker `...`. */
|
||||
this.docEnd = false;
|
||||
this.yaml = Object.assign({}, Directives.defaultYaml, yaml);
|
||||
this.tags = Object.assign({}, Directives.defaultTags, tags);
|
||||
}
|
||||
clone() {
|
||||
const copy = new Directives(this.yaml, this.tags);
|
||||
copy.docStart = this.docStart;
|
||||
return copy;
|
||||
}
|
||||
/**
|
||||
* During parsing, get a Directives instance for the current document and
|
||||
* update the stream state according to the current version's spec.
|
||||
*/
|
||||
atDocument() {
|
||||
const res = new Directives(this.yaml, this.tags);
|
||||
switch (this.yaml.version) {
|
||||
case '1.1':
|
||||
this.atNextDocument = true;
|
||||
break;
|
||||
case '1.2':
|
||||
this.atNextDocument = false;
|
||||
this.yaml = {
|
||||
explicit: Directives.defaultYaml.explicit,
|
||||
version: '1.2'
|
||||
};
|
||||
this.tags = Object.assign({}, Directives.defaultTags);
|
||||
break;
|
||||
}
|
||||
return res;
|
||||
}
|
||||
/**
|
||||
* @param onError - May be called even if the action was successful
|
||||
* @returns `true` on success
|
||||
*/
|
||||
add(line, onError) {
|
||||
if (this.atNextDocument) {
|
||||
this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' };
|
||||
this.tags = Object.assign({}, Directives.defaultTags);
|
||||
this.atNextDocument = false;
|
||||
}
|
||||
const parts = line.trim().split(/[ \t]+/);
|
||||
const name = parts.shift();
|
||||
switch (name) {
|
||||
case '%TAG': {
|
||||
if (parts.length !== 2) {
|
||||
onError(0, '%TAG directive should contain exactly two parts');
|
||||
if (parts.length < 2)
|
||||
return false;
|
||||
}
|
||||
const [handle, prefix] = parts;
|
||||
this.tags[handle] = prefix;
|
||||
return true;
|
||||
}
|
||||
case '%YAML': {
|
||||
this.yaml.explicit = true;
|
||||
if (parts.length !== 1) {
|
||||
onError(0, '%YAML directive should contain exactly one part');
|
||||
return false;
|
||||
}
|
||||
const [version] = parts;
|
||||
if (version === '1.1' || version === '1.2') {
|
||||
this.yaml.version = version;
|
||||
return true;
|
||||
}
|
||||
else {
|
||||
const isValid = /^\d+\.\d+$/.test(version);
|
||||
onError(6, `Unsupported YAML version ${version}`, isValid);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
default:
|
||||
onError(0, `Unknown directive ${name}`, true);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Resolves a tag, matching handles to those defined in %TAG directives.
|
||||
*
|
||||
* @returns Resolved tag, which may also be the non-specific tag `'!'` or a
|
||||
* `'!local'` tag, or `null` if unresolvable.
|
||||
*/
|
||||
tagName(source, onError) {
|
||||
if (source === '!')
|
||||
return '!'; // non-specific tag
|
||||
if (source[0] !== '!') {
|
||||
onError(`Not a valid tag: ${source}`);
|
||||
return null;
|
||||
}
|
||||
if (source[1] === '<') {
|
||||
const verbatim = source.slice(2, -1);
|
||||
if (verbatim === '!' || verbatim === '!!') {
|
||||
onError(`Verbatim tags aren't resolved, so ${source} is invalid.`);
|
||||
return null;
|
||||
}
|
||||
if (source[source.length - 1] !== '>')
|
||||
onError('Verbatim tags must end with a >');
|
||||
return verbatim;
|
||||
}
|
||||
const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/);
|
||||
if (!suffix)
|
||||
onError(`The ${source} tag has no suffix`);
|
||||
const prefix = this.tags[handle];
|
||||
if (prefix)
|
||||
return prefix + decodeURIComponent(suffix);
|
||||
if (handle === '!')
|
||||
return source; // local tag
|
||||
onError(`Could not resolve tag: ${source}`);
|
||||
return null;
|
||||
}
|
||||
/**
|
||||
* Given a fully resolved tag, returns its printable string form,
|
||||
* taking into account current tag prefixes and defaults.
|
||||
*/
|
||||
tagString(tag) {
|
||||
for (const [handle, prefix] of Object.entries(this.tags)) {
|
||||
if (tag.startsWith(prefix))
|
||||
return handle + escapeTagName(tag.substring(prefix.length));
|
||||
}
|
||||
return tag[0] === '!' ? tag : `!<${tag}>`;
|
||||
}
|
||||
toString(doc) {
|
||||
const lines = this.yaml.explicit
|
||||
? [`%YAML ${this.yaml.version || '1.2'}`]
|
||||
: [];
|
||||
const tagEntries = Object.entries(this.tags);
|
||||
let tagNames;
|
||||
if (doc && tagEntries.length > 0 && isNode(doc.contents)) {
|
||||
const tags = {};
|
||||
visit(doc.contents, (_key, node) => {
|
||||
if (isNode(node) && node.tag)
|
||||
tags[node.tag] = true;
|
||||
});
|
||||
tagNames = Object.keys(tags);
|
||||
}
|
||||
else
|
||||
tagNames = [];
|
||||
for (const [handle, prefix] of tagEntries) {
|
||||
if (handle === '!!' && prefix === 'tag:yaml.org,2002:')
|
||||
continue;
|
||||
if (!doc || tagNames.some(tn => tn.startsWith(prefix)))
|
||||
lines.push(`%TAG ${handle} ${prefix}`);
|
||||
}
|
||||
return lines.join('\n');
|
||||
}
|
||||
}
|
||||
Directives.defaultYaml = { explicit: false, version: '1.2' };
|
||||
Directives.defaultTags = { '!!': 'tag:yaml.org,2002:' };
|
||||
|
||||
export { Directives };
|
57
node_modules/yaml/browser/dist/errors.js
generated
vendored
Normal file
57
node_modules/yaml/browser/dist/errors.js
generated
vendored
Normal file
|
@ -0,0 +1,57 @@
|
|||
class YAMLError extends Error {
|
||||
constructor(name, pos, code, message) {
|
||||
super();
|
||||
this.name = name;
|
||||
this.code = code;
|
||||
this.message = message;
|
||||
this.pos = pos;
|
||||
}
|
||||
}
|
||||
class YAMLParseError extends YAMLError {
|
||||
constructor(pos, code, message) {
|
||||
super('YAMLParseError', pos, code, message);
|
||||
}
|
||||
}
|
||||
class YAMLWarning extends YAMLError {
|
||||
constructor(pos, code, message) {
|
||||
super('YAMLWarning', pos, code, message);
|
||||
}
|
||||
}
|
||||
const prettifyError = (src, lc) => (error) => {
|
||||
if (error.pos[0] === -1)
|
||||
return;
|
||||
error.linePos = error.pos.map(pos => lc.linePos(pos));
|
||||
const { line, col } = error.linePos[0];
|
||||
error.message += ` at line ${line}, column ${col}`;
|
||||
let ci = col - 1;
|
||||
let lineStr = src
|
||||
.substring(lc.lineStarts[line - 1], lc.lineStarts[line])
|
||||
.replace(/[\n\r]+$/, '');
|
||||
// Trim to max 80 chars, keeping col position near the middle
|
||||
if (ci >= 60 && lineStr.length > 80) {
|
||||
const trimStart = Math.min(ci - 39, lineStr.length - 79);
|
||||
lineStr = '…' + lineStr.substring(trimStart);
|
||||
ci -= trimStart - 1;
|
||||
}
|
||||
if (lineStr.length > 80)
|
||||
lineStr = lineStr.substring(0, 79) + '…';
|
||||
// Include previous line in context if pointing at line start
|
||||
if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) {
|
||||
// Regexp won't match if start is trimmed
|
||||
let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]);
|
||||
if (prev.length > 80)
|
||||
prev = prev.substring(0, 79) + '…\n';
|
||||
lineStr = prev + lineStr;
|
||||
}
|
||||
if (/[^ ]/.test(lineStr)) {
|
||||
let count = 1;
|
||||
const end = error.linePos[1];
|
||||
if (end && end.line === line && end.col > col) {
|
||||
count = Math.min(end.col - col, 80 - ci);
|
||||
}
|
||||
const pointer = ' '.repeat(ci) + '^'.repeat(count);
|
||||
error.message += `:\n\n${lineStr}\n${pointer}\n`;
|
||||
}
|
||||
};
|
||||
|
||||
export { YAMLError, YAMLParseError, YAMLWarning, prettifyError };
|
17
node_modules/yaml/browser/dist/index.js
generated
vendored
Normal file
17
node_modules/yaml/browser/dist/index.js
generated
vendored
Normal file
|
@ -0,0 +1,17 @@
|
|||
export { Composer } from './compose/composer.js';
|
||||
export { Document } from './doc/Document.js';
|
||||
export { Schema } from './schema/Schema.js';
|
||||
export { YAMLError, YAMLParseError, YAMLWarning } from './errors.js';
|
||||
export { Alias } from './nodes/Alias.js';
|
||||
export { isAlias, isCollection, isDocument, isMap, isNode, isPair, isScalar, isSeq } from './nodes/Node.js';
|
||||
export { Pair } from './nodes/Pair.js';
|
||||
export { Scalar } from './nodes/Scalar.js';
|
||||
export { YAMLMap } from './nodes/YAMLMap.js';
|
||||
export { YAMLSeq } from './nodes/YAMLSeq.js';
|
||||
import * as cst from './parse/cst.js';
|
||||
export { cst as CST };
|
||||
export { Lexer } from './parse/lexer.js';
|
||||
export { LineCounter } from './parse/line-counter.js';
|
||||
export { Parser } from './parse/parser.js';
|
||||
export { parse, parseAllDocuments, parseDocument, stringify } from './public-api.js';
|
||||
export { visit, visitAsync } from './visit.js';
|
14
node_modules/yaml/browser/dist/log.js
generated
vendored
Normal file
14
node_modules/yaml/browser/dist/log.js
generated
vendored
Normal file
|
@ -0,0 +1,14 @@
|
|||
function debug(logLevel, ...messages) {
|
||||
if (logLevel === 'debug')
|
||||
console.log(...messages);
|
||||
}
|
||||
function warn(logLevel, warning) {
|
||||
if (logLevel === 'debug' || logLevel === 'warn') {
|
||||
if (typeof process !== 'undefined' && process.emitWarning)
|
||||
process.emitWarning(warning);
|
||||
else
|
||||
console.warn(warning);
|
||||
}
|
||||
}
|
||||
|
||||
export { debug, warn };
|
164
node_modules/yaml/browser/dist/node_modules/tslib/tslib.es6.js
generated
vendored
Normal file
164
node_modules/yaml/browser/dist/node_modules/tslib/tslib.es6.js
generated
vendored
Normal file
|
@ -0,0 +1,164 @@
|
|||
/*! *****************************************************************************
|
||||
Copyright (c) Microsoft Corporation.
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
|
||||
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
|
||||
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
|
||||
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
|
||||
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
|
||||
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
||||
PERFORMANCE OF THIS SOFTWARE.
|
||||
***************************************************************************** */
|
||||
|
||||
/* global Reflect, Promise */
|
||||
var extendStatics = function (d, b) {
|
||||
extendStatics = Object.setPrototypeOf || {
|
||||
__proto__: []
|
||||
} instanceof Array && function (d, b) {
|
||||
d.__proto__ = b;
|
||||
} || function (d, b) {
|
||||
for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p];
|
||||
};
|
||||
|
||||
return extendStatics(d, b);
|
||||
};
|
||||
|
||||
function __extends(d, b) {
|
||||
if (typeof b !== "function" && b !== null) throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
|
||||
extendStatics(d, b);
|
||||
|
||||
function __() {
|
||||
this.constructor = d;
|
||||
}
|
||||
|
||||
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
|
||||
}
|
||||
function __generator(thisArg, body) {
|
||||
var _ = {
|
||||
label: 0,
|
||||
sent: function () {
|
||||
if (t[0] & 1) throw t[1];
|
||||
return t[1];
|
||||
},
|
||||
trys: [],
|
||||
ops: []
|
||||
},
|
||||
f,
|
||||
y,
|
||||
t,
|
||||
g;
|
||||
return g = {
|
||||
next: verb(0),
|
||||
"throw": verb(1),
|
||||
"return": verb(2)
|
||||
}, typeof Symbol === "function" && (g[Symbol.iterator] = function () {
|
||||
return this;
|
||||
}), g;
|
||||
|
||||
function verb(n) {
|
||||
return function (v) {
|
||||
return step([n, v]);
|
||||
};
|
||||
}
|
||||
|
||||
function step(op) {
|
||||
if (f) throw new TypeError("Generator is already executing.");
|
||||
|
||||
while (_) try {
|
||||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||||
|
||||
switch (op[0]) {
|
||||
case 0:
|
||||
case 1:
|
||||
t = op;
|
||||
break;
|
||||
|
||||
case 4:
|
||||
_.label++;
|
||||
return {
|
||||
value: op[1],
|
||||
done: false
|
||||
};
|
||||
|
||||
case 5:
|
||||
_.label++;
|
||||
y = op[1];
|
||||
op = [0];
|
||||
continue;
|
||||
|
||||
case 7:
|
||||
op = _.ops.pop();
|
||||
|
||||
_.trys.pop();
|
||||
|
||||
continue;
|
||||
|
||||
default:
|
||||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) {
|
||||
_ = 0;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (op[0] === 3 && (!t || op[1] > t[0] && op[1] < t[3])) {
|
||||
_.label = op[1];
|
||||
break;
|
||||
}
|
||||
|
||||
if (op[0] === 6 && _.label < t[1]) {
|
||||
_.label = t[1];
|
||||
t = op;
|
||||
break;
|
||||
}
|
||||
|
||||
if (t && _.label < t[2]) {
|
||||
_.label = t[2];
|
||||
|
||||
_.ops.push(op);
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
if (t[2]) _.ops.pop();
|
||||
|
||||
_.trys.pop();
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
op = body.call(thisArg, _);
|
||||
} catch (e) {
|
||||
op = [6, e];
|
||||
y = 0;
|
||||
} finally {
|
||||
f = t = 0;
|
||||
}
|
||||
|
||||
if (op[0] & 5) throw op[1];
|
||||
return {
|
||||
value: op[0] ? op[1] : void 0,
|
||||
done: true
|
||||
};
|
||||
}
|
||||
}
|
||||
function __values(o) {
|
||||
var s = typeof Symbol === "function" && Symbol.iterator,
|
||||
m = s && o[s],
|
||||
i = 0;
|
||||
if (m) return m.call(o);
|
||||
if (o && typeof o.length === "number") return {
|
||||
next: function () {
|
||||
if (o && i >= o.length) o = void 0;
|
||||
return {
|
||||
value: o && o[i++],
|
||||
done: !o
|
||||
};
|
||||
}
|
||||
};
|
||||
throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined.");
|
||||
}
|
||||
|
||||
export { __extends, __generator, __values };
|
94
node_modules/yaml/browser/dist/nodes/Alias.js
generated
vendored
Normal file
94
node_modules/yaml/browser/dist/nodes/Alias.js
generated
vendored
Normal file
|
@ -0,0 +1,94 @@
|
|||
import { anchorIsValid } from '../doc/anchors.js';
|
||||
import { visit } from '../visit.js';
|
||||
import { NodeBase, ALIAS, isAlias, isCollection, isPair } from './Node.js';
|
||||
|
||||
class Alias extends NodeBase {
|
||||
constructor(source) {
|
||||
super(ALIAS);
|
||||
this.source = source;
|
||||
Object.defineProperty(this, 'tag', {
|
||||
set() {
|
||||
throw new Error('Alias nodes cannot have tags');
|
||||
}
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Resolve the value of this alias within `doc`, finding the last
|
||||
* instance of the `source` anchor before this node.
|
||||
*/
|
||||
resolve(doc) {
|
||||
let found = undefined;
|
||||
visit(doc, {
|
||||
Node: (_key, node) => {
|
||||
if (node === this)
|
||||
return visit.BREAK;
|
||||
if (node.anchor === this.source)
|
||||
found = node;
|
||||
}
|
||||
});
|
||||
return found;
|
||||
}
|
||||
toJSON(_arg, ctx) {
|
||||
if (!ctx)
|
||||
return { source: this.source };
|
||||
const { anchors, doc, maxAliasCount } = ctx;
|
||||
const source = this.resolve(doc);
|
||||
if (!source) {
|
||||
const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;
|
||||
throw new ReferenceError(msg);
|
||||
}
|
||||
const data = anchors.get(source);
|
||||
/* istanbul ignore if */
|
||||
if (!data || data.res === undefined) {
|
||||
const msg = 'This should not happen: Alias anchor was not resolved?';
|
||||
throw new ReferenceError(msg);
|
||||
}
|
||||
if (maxAliasCount >= 0) {
|
||||
data.count += 1;
|
||||
if (data.aliasCount === 0)
|
||||
data.aliasCount = getAliasCount(doc, source, anchors);
|
||||
if (data.count * data.aliasCount > maxAliasCount) {
|
||||
const msg = 'Excessive alias count indicates a resource exhaustion attack';
|
||||
throw new ReferenceError(msg);
|
||||
}
|
||||
}
|
||||
return data.res;
|
||||
}
|
||||
toString(ctx, _onComment, _onChompKeep) {
|
||||
const src = `*${this.source}`;
|
||||
if (ctx) {
|
||||
anchorIsValid(this.source);
|
||||
if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) {
|
||||
const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;
|
||||
throw new Error(msg);
|
||||
}
|
||||
if (ctx.implicitKey)
|
||||
return `${src} `;
|
||||
}
|
||||
return src;
|
||||
}
|
||||
}
|
||||
function getAliasCount(doc, node, anchors) {
|
||||
if (isAlias(node)) {
|
||||
const source = node.resolve(doc);
|
||||
const anchor = anchors && source && anchors.get(source);
|
||||
return anchor ? anchor.count * anchor.aliasCount : 0;
|
||||
}
|
||||
else if (isCollection(node)) {
|
||||
let count = 0;
|
||||
for (const item of node.items) {
|
||||
const c = getAliasCount(doc, item, anchors);
|
||||
if (c > count)
|
||||
count = c;
|
||||
}
|
||||
return count;
|
||||
}
|
||||
else if (isPair(node)) {
|
||||
const kc = getAliasCount(doc, node.key, anchors);
|
||||
const vc = getAliasCount(doc, node.value, anchors);
|
||||
return Math.max(kc, vc);
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
|
||||
export { Alias };
|
147
node_modules/yaml/browser/dist/nodes/Collection.js
generated
vendored
Normal file
147
node_modules/yaml/browser/dist/nodes/Collection.js
generated
vendored
Normal file
|
@ -0,0 +1,147 @@
|
|||
import { createNode } from '../doc/createNode.js';
|
||||
import { NodeBase, isNode, isPair, isCollection, isScalar } from './Node.js';
|
||||
|
||||
function collectionFromPath(schema, path, value) {
|
||||
let v = value;
|
||||
for (let i = path.length - 1; i >= 0; --i) {
|
||||
const k = path[i];
|
||||
if (typeof k === 'number' && Number.isInteger(k) && k >= 0) {
|
||||
const a = [];
|
||||
a[k] = v;
|
||||
v = a;
|
||||
}
|
||||
else {
|
||||
v = new Map([[k, v]]);
|
||||
}
|
||||
}
|
||||
return createNode(v, undefined, {
|
||||
aliasDuplicateObjects: false,
|
||||
keepUndefined: false,
|
||||
onAnchor: () => {
|
||||
throw new Error('This should not happen, please report a bug.');
|
||||
},
|
||||
schema,
|
||||
sourceObjects: new Map()
|
||||
});
|
||||
}
|
||||
// Type guard is intentionally a little wrong so as to be more useful,
|
||||
// as it does not cover untypable empty non-string iterables (e.g. []).
|
||||
const isEmptyPath = (path) => path == null ||
|
||||
(typeof path === 'object' && !!path[Symbol.iterator]().next().done);
|
||||
class Collection extends NodeBase {
|
||||
constructor(type, schema) {
|
||||
super(type);
|
||||
Object.defineProperty(this, 'schema', {
|
||||
value: schema,
|
||||
configurable: true,
|
||||
enumerable: false,
|
||||
writable: true
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Create a copy of this collection.
|
||||
*
|
||||
* @param schema - If defined, overwrites the original's schema
|
||||
*/
|
||||
clone(schema) {
|
||||
const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));
|
||||
if (schema)
|
||||
copy.schema = schema;
|
||||
copy.items = copy.items.map(it => isNode(it) || isPair(it) ? it.clone(schema) : it);
|
||||
if (this.range)
|
||||
copy.range = this.range.slice();
|
||||
return copy;
|
||||
}
|
||||
/**
|
||||
* Adds a value to the collection. For `!!map` and `!!omap` the value must
|
||||
* be a Pair instance or a `{ key, value }` object, which may not have a key
|
||||
* that already exists in the map.
|
||||
*/
|
||||
addIn(path, value) {
|
||||
if (isEmptyPath(path))
|
||||
this.add(value);
|
||||
else {
|
||||
const [key, ...rest] = path;
|
||||
const node = this.get(key, true);
|
||||
if (isCollection(node))
|
||||
node.addIn(rest, value);
|
||||
else if (node === undefined && this.schema)
|
||||
this.set(key, collectionFromPath(this.schema, rest, value));
|
||||
else
|
||||
throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Removes a value from the collection.
|
||||
* @returns `true` if the item was found and removed.
|
||||
*/
|
||||
deleteIn(path) {
|
||||
const [key, ...rest] = path;
|
||||
if (rest.length === 0)
|
||||
return this.delete(key);
|
||||
const node = this.get(key, true);
|
||||
if (isCollection(node))
|
||||
return node.deleteIn(rest);
|
||||
else
|
||||
throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
|
||||
}
|
||||
/**
|
||||
* Returns item at `key`, or `undefined` if not found. By default unwraps
|
||||
* scalar values from their surrounding node; to disable set `keepScalar` to
|
||||
* `true` (collections are always returned intact).
|
||||
*/
|
||||
getIn(path, keepScalar) {
|
||||
const [key, ...rest] = path;
|
||||
const node = this.get(key, true);
|
||||
if (rest.length === 0)
|
||||
return !keepScalar && isScalar(node) ? node.value : node;
|
||||
else
|
||||
return isCollection(node) ? node.getIn(rest, keepScalar) : undefined;
|
||||
}
|
||||
hasAllNullValues(allowScalar) {
|
||||
return this.items.every(node => {
|
||||
if (!isPair(node))
|
||||
return false;
|
||||
const n = node.value;
|
||||
return (n == null ||
|
||||
(allowScalar &&
|
||||
isScalar(n) &&
|
||||
n.value == null &&
|
||||
!n.commentBefore &&
|
||||
!n.comment &&
|
||||
!n.tag));
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Checks if the collection includes a value with the key `key`.
|
||||
*/
|
||||
hasIn(path) {
|
||||
const [key, ...rest] = path;
|
||||
if (rest.length === 0)
|
||||
return this.has(key);
|
||||
const node = this.get(key, true);
|
||||
return isCollection(node) ? node.hasIn(rest) : false;
|
||||
}
|
||||
/**
|
||||
* Sets a value in this collection. For `!!set`, `value` needs to be a
|
||||
* boolean to add/remove the item from the set.
|
||||
*/
|
||||
setIn(path, value) {
|
||||
const [key, ...rest] = path;
|
||||
if (rest.length === 0) {
|
||||
this.set(key, value);
|
||||
}
|
||||
else {
|
||||
const node = this.get(key, true);
|
||||
if (isCollection(node))
|
||||
node.setIn(rest, value);
|
||||
else if (node === undefined && this.schema)
|
||||
this.set(key, collectionFromPath(this.schema, rest, value));
|
||||
else
|
||||
throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
Collection.maxFlowStringSingleLineLength = 60;
|
||||
|
||||
export { Collection, collectionFromPath, isEmptyPath };
|
48
node_modules/yaml/browser/dist/nodes/Node.js
generated
vendored
Normal file
48
node_modules/yaml/browser/dist/nodes/Node.js
generated
vendored
Normal file
|
@ -0,0 +1,48 @@
|
|||
const ALIAS = Symbol.for('yaml.alias');
|
||||
const DOC = Symbol.for('yaml.document');
|
||||
const MAP = Symbol.for('yaml.map');
|
||||
const PAIR = Symbol.for('yaml.pair');
|
||||
const SCALAR = Symbol.for('yaml.scalar');
|
||||
const SEQ = Symbol.for('yaml.seq');
|
||||
const NODE_TYPE = Symbol.for('yaml.node.type');
|
||||
const isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS;
|
||||
const isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC;
|
||||
const isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP;
|
||||
const isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR;
|
||||
const isScalar = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR;
|
||||
const isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ;
|
||||
function isCollection(node) {
|
||||
if (node && typeof node === 'object')
|
||||
switch (node[NODE_TYPE]) {
|
||||
case MAP:
|
||||
case SEQ:
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
function isNode(node) {
|
||||
if (node && typeof node === 'object')
|
||||
switch (node[NODE_TYPE]) {
|
||||
case ALIAS:
|
||||
case MAP:
|
||||
case SCALAR:
|
||||
case SEQ:
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
const hasAnchor = (node) => (isScalar(node) || isCollection(node)) && !!node.anchor;
|
||||
class NodeBase {
|
||||
constructor(type) {
|
||||
Object.defineProperty(this, NODE_TYPE, { value: type });
|
||||
}
|
||||
/** Create a copy of this node. */
|
||||
clone() {
|
||||
const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));
|
||||
if (this.range)
|
||||
copy.range = this.range.slice();
|
||||
return copy;
|
||||
}
|
||||
}
|
||||
|
||||
export { ALIAS, DOC, MAP, NODE_TYPE, NodeBase, PAIR, SCALAR, SEQ, hasAnchor, isAlias, isCollection, isDocument, isMap, isNode, isPair, isScalar, isSeq };
|
36
node_modules/yaml/browser/dist/nodes/Pair.js
generated
vendored
Normal file
36
node_modules/yaml/browser/dist/nodes/Pair.js
generated
vendored
Normal file
|
@ -0,0 +1,36 @@
|
|||
import { createNode } from '../doc/createNode.js';
|
||||
import { stringifyPair } from '../stringify/stringifyPair.js';
|
||||
import { addPairToJSMap } from './addPairToJSMap.js';
|
||||
import { NODE_TYPE, PAIR, isNode } from './Node.js';
|
||||
|
||||
function createPair(key, value, ctx) {
|
||||
const k = createNode(key, undefined, ctx);
|
||||
const v = createNode(value, undefined, ctx);
|
||||
return new Pair(k, v);
|
||||
}
|
||||
class Pair {
|
||||
constructor(key, value = null) {
|
||||
Object.defineProperty(this, NODE_TYPE, { value: PAIR });
|
||||
this.key = key;
|
||||
this.value = value;
|
||||
}
|
||||
clone(schema) {
|
||||
let { key, value } = this;
|
||||
if (isNode(key))
|
||||
key = key.clone(schema);
|
||||
if (isNode(value))
|
||||
value = value.clone(schema);
|
||||
return new Pair(key, value);
|
||||
}
|
||||
toJSON(_, ctx) {
|
||||
const pair = ctx?.mapAsMap ? new Map() : {};
|
||||
return addPairToJSMap(ctx, pair, this);
|
||||
}
|
||||
toString(ctx, onComment, onChompKeep) {
|
||||
return ctx?.doc
|
||||
? stringifyPair(this, ctx, onComment, onChompKeep)
|
||||
: JSON.stringify(this);
|
||||
}
|
||||
}
|
||||
|
||||
export { Pair, createPair };
|
23
node_modules/yaml/browser/dist/nodes/Scalar.js
generated
vendored
Normal file
23
node_modules/yaml/browser/dist/nodes/Scalar.js
generated
vendored
Normal file
|
@ -0,0 +1,23 @@
|
|||
import { NodeBase, SCALAR } from './Node.js';
|
||||
import { toJS } from './toJS.js';
|
||||
|
||||
const isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object');
|
||||
class Scalar extends NodeBase {
|
||||
constructor(value) {
|
||||
super(SCALAR);
|
||||
this.value = value;
|
||||
}
|
||||
toJSON(arg, ctx) {
|
||||
return ctx?.keep ? this.value : toJS(this.value, arg, ctx);
|
||||
}
|
||||
toString() {
|
||||
return String(this.value);
|
||||
}
|
||||
}
|
||||
Scalar.BLOCK_FOLDED = 'BLOCK_FOLDED';
|
||||
Scalar.BLOCK_LITERAL = 'BLOCK_LITERAL';
|
||||
Scalar.PLAIN = 'PLAIN';
|
||||
Scalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE';
|
||||
Scalar.QUOTE_SINGLE = 'QUOTE_SINGLE';
|
||||
|
||||
export { Scalar, isScalarValue };
|
116
node_modules/yaml/browser/dist/nodes/YAMLMap.js
generated
vendored
Normal file
116
node_modules/yaml/browser/dist/nodes/YAMLMap.js
generated
vendored
Normal file
|
@ -0,0 +1,116 @@
|
|||
import { stringifyCollection } from '../stringify/stringifyCollection.js';
|
||||
import { addPairToJSMap } from './addPairToJSMap.js';
|
||||
import { Collection } from './Collection.js';
|
||||
import { isPair, isScalar, MAP } from './Node.js';
|
||||
import { Pair } from './Pair.js';
|
||||
import { isScalarValue } from './Scalar.js';
|
||||
|
||||
function findPair(items, key) {
|
||||
const k = isScalar(key) ? key.value : key;
|
||||
for (const it of items) {
|
||||
if (isPair(it)) {
|
||||
if (it.key === key || it.key === k)
|
||||
return it;
|
||||
if (isScalar(it.key) && it.key.value === k)
|
||||
return it;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
class YAMLMap extends Collection {
|
||||
static get tagName() {
|
||||
return 'tag:yaml.org,2002:map';
|
||||
}
|
||||
constructor(schema) {
|
||||
super(MAP, schema);
|
||||
this.items = [];
|
||||
}
|
||||
/**
|
||||
* Adds a value to the collection.
|
||||
*
|
||||
* @param overwrite - If not set `true`, using a key that is already in the
|
||||
* collection will throw. Otherwise, overwrites the previous value.
|
||||
*/
|
||||
add(pair, overwrite) {
|
||||
let _pair;
|
||||
if (isPair(pair))
|
||||
_pair = pair;
|
||||
else if (!pair || typeof pair !== 'object' || !('key' in pair)) {
|
||||
// In TypeScript, this never happens.
|
||||
_pair = new Pair(pair, pair?.value);
|
||||
}
|
||||
else
|
||||
_pair = new Pair(pair.key, pair.value);
|
||||
const prev = findPair(this.items, _pair.key);
|
||||
const sortEntries = this.schema?.sortMapEntries;
|
||||
if (prev) {
|
||||
if (!overwrite)
|
||||
throw new Error(`Key ${_pair.key} already set`);
|
||||
// For scalars, keep the old node & its comments and anchors
|
||||
if (isScalar(prev.value) && isScalarValue(_pair.value))
|
||||
prev.value.value = _pair.value;
|
||||
else
|
||||
prev.value = _pair.value;
|
||||
}
|
||||
else if (sortEntries) {
|
||||
const i = this.items.findIndex(item => sortEntries(_pair, item) < 0);
|
||||
if (i === -1)
|
||||
this.items.push(_pair);
|
||||
else
|
||||
this.items.splice(i, 0, _pair);
|
||||
}
|
||||
else {
|
||||
this.items.push(_pair);
|
||||
}
|
||||
}
|
||||
delete(key) {
|
||||
const it = findPair(this.items, key);
|
||||
if (!it)
|
||||
return false;
|
||||
const del = this.items.splice(this.items.indexOf(it), 1);
|
||||
return del.length > 0;
|
||||
}
|
||||
get(key, keepScalar) {
|
||||
const it = findPair(this.items, key);
|
||||
const node = it?.value;
|
||||
return (!keepScalar && isScalar(node) ? node.value : node) ?? undefined;
|
||||
}
|
||||
has(key) {
|
||||
return !!findPair(this.items, key);
|
||||
}
|
||||
set(key, value) {
|
||||
this.add(new Pair(key, value), true);
|
||||
}
|
||||
/**
|
||||
* @param ctx - Conversion context, originally set in Document#toJS()
|
||||
* @param {Class} Type - If set, forces the returned collection type
|
||||
* @returns Instance of Type, Map, or Object
|
||||
*/
|
||||
toJSON(_, ctx, Type) {
|
||||
const map = Type ? new Type() : ctx?.mapAsMap ? new Map() : {};
|
||||
if (ctx?.onCreate)
|
||||
ctx.onCreate(map);
|
||||
for (const item of this.items)
|
||||
addPairToJSMap(ctx, map, item);
|
||||
return map;
|
||||
}
|
||||
toString(ctx, onComment, onChompKeep) {
|
||||
if (!ctx)
|
||||
return JSON.stringify(this);
|
||||
for (const item of this.items) {
|
||||
if (!isPair(item))
|
||||
throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`);
|
||||
}
|
||||
if (!ctx.allNullValues && this.hasAllNullValues(false))
|
||||
ctx = Object.assign({}, ctx, { allNullValues: true });
|
||||
return stringifyCollection(this, ctx, {
|
||||
blockItemPrefix: '',
|
||||
flowChars: { start: '{', end: '}' },
|
||||
itemIndent: ctx.indent || '',
|
||||
onChompKeep,
|
||||
onComment
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export { YAMLMap, findPair };
|
97
node_modules/yaml/browser/dist/nodes/YAMLSeq.js
generated
vendored
Normal file
97
node_modules/yaml/browser/dist/nodes/YAMLSeq.js
generated
vendored
Normal file
|
@ -0,0 +1,97 @@
|
|||
import { stringifyCollection } from '../stringify/stringifyCollection.js';
|
||||
import { Collection } from './Collection.js';
|
||||
import { SEQ, isScalar } from './Node.js';
|
||||
import { isScalarValue } from './Scalar.js';
|
||||
import { toJS } from './toJS.js';
|
||||
|
||||
class YAMLSeq extends Collection {
|
||||
static get tagName() {
|
||||
return 'tag:yaml.org,2002:seq';
|
||||
}
|
||||
constructor(schema) {
|
||||
super(SEQ, schema);
|
||||
this.items = [];
|
||||
}
|
||||
add(value) {
|
||||
this.items.push(value);
|
||||
}
|
||||
/**
|
||||
* Removes a value from the collection.
|
||||
*
|
||||
* `key` must contain a representation of an integer for this to succeed.
|
||||
* It may be wrapped in a `Scalar`.
|
||||
*
|
||||
* @returns `true` if the item was found and removed.
|
||||
*/
|
||||
delete(key) {
|
||||
const idx = asItemIndex(key);
|
||||
if (typeof idx !== 'number')
|
||||
return false;
|
||||
const del = this.items.splice(idx, 1);
|
||||
return del.length > 0;
|
||||
}
|
||||
get(key, keepScalar) {
|
||||
const idx = asItemIndex(key);
|
||||
if (typeof idx !== 'number')
|
||||
return undefined;
|
||||
const it = this.items[idx];
|
||||
return !keepScalar && isScalar(it) ? it.value : it;
|
||||
}
|
||||
/**
|
||||
* Checks if the collection includes a value with the key `key`.
|
||||
*
|
||||
* `key` must contain a representation of an integer for this to succeed.
|
||||
* It may be wrapped in a `Scalar`.
|
||||
*/
|
||||
has(key) {
|
||||
const idx = asItemIndex(key);
|
||||
return typeof idx === 'number' && idx < this.items.length;
|
||||
}
|
||||
/**
|
||||
* Sets a value in this collection. For `!!set`, `value` needs to be a
|
||||
* boolean to add/remove the item from the set.
|
||||
*
|
||||
* If `key` does not contain a representation of an integer, this will throw.
|
||||
* It may be wrapped in a `Scalar`.
|
||||
*/
|
||||
set(key, value) {
|
||||
const idx = asItemIndex(key);
|
||||
if (typeof idx !== 'number')
|
||||
throw new Error(`Expected a valid index, not ${key}.`);
|
||||
const prev = this.items[idx];
|
||||
if (isScalar(prev) && isScalarValue(value))
|
||||
prev.value = value;
|
||||
else
|
||||
this.items[idx] = value;
|
||||
}
|
||||
toJSON(_, ctx) {
|
||||
const seq = [];
|
||||
if (ctx?.onCreate)
|
||||
ctx.onCreate(seq);
|
||||
let i = 0;
|
||||
for (const item of this.items)
|
||||
seq.push(toJS(item, String(i++), ctx));
|
||||
return seq;
|
||||
}
|
||||
toString(ctx, onComment, onChompKeep) {
|
||||
if (!ctx)
|
||||
return JSON.stringify(this);
|
||||
return stringifyCollection(this, ctx, {
|
||||
blockItemPrefix: '- ',
|
||||
flowChars: { start: '[', end: ']' },
|
||||
itemIndent: (ctx.indent || '') + ' ',
|
||||
onChompKeep,
|
||||
onComment
|
||||
});
|
||||
}
|
||||
}
|
||||
function asItemIndex(key) {
|
||||
let idx = isScalar(key) ? key.value : key;
|
||||
if (idx && typeof idx === 'string')
|
||||
idx = Number(idx);
|
||||
return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0
|
||||
? idx
|
||||
: null;
|
||||
}
|
||||
|
||||
export { YAMLSeq };
|
104
node_modules/yaml/browser/dist/nodes/addPairToJSMap.js
generated
vendored
Normal file
104
node_modules/yaml/browser/dist/nodes/addPairToJSMap.js
generated
vendored
Normal file
|
@ -0,0 +1,104 @@
|
|||
import { warn } from '../log.js';
|
||||
import { createStringifyContext } from '../stringify/stringify.js';
|
||||
import { isAlias, isSeq, isScalar, isMap, isNode } from './Node.js';
|
||||
import { Scalar } from './Scalar.js';
|
||||
import { toJS } from './toJS.js';
|
||||
|
||||
const MERGE_KEY = '<<';
|
||||
function addPairToJSMap(ctx, map, { key, value }) {
|
||||
if (ctx?.doc.schema.merge && isMergeKey(key)) {
|
||||
value = isAlias(value) ? value.resolve(ctx.doc) : value;
|
||||
if (isSeq(value))
|
||||
for (const it of value.items)
|
||||
mergeToJSMap(ctx, map, it);
|
||||
else if (Array.isArray(value))
|
||||
for (const it of value)
|
||||
mergeToJSMap(ctx, map, it);
|
||||
else
|
||||
mergeToJSMap(ctx, map, value);
|
||||
}
|
||||
else {
|
||||
const jsKey = toJS(key, '', ctx);
|
||||
if (map instanceof Map) {
|
||||
map.set(jsKey, toJS(value, jsKey, ctx));
|
||||
}
|
||||
else if (map instanceof Set) {
|
||||
map.add(jsKey);
|
||||
}
|
||||
else {
|
||||
const stringKey = stringifyKey(key, jsKey, ctx);
|
||||
const jsValue = toJS(value, stringKey, ctx);
|
||||
if (stringKey in map)
|
||||
Object.defineProperty(map, stringKey, {
|
||||
value: jsValue,
|
||||
writable: true,
|
||||
enumerable: true,
|
||||
configurable: true
|
||||
});
|
||||
else
|
||||
map[stringKey] = jsValue;
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
const isMergeKey = (key) => key === MERGE_KEY ||
|
||||
(isScalar(key) &&
|
||||
key.value === MERGE_KEY &&
|
||||
(!key.type || key.type === Scalar.PLAIN));
|
||||
// If the value associated with a merge key is a single mapping node, each of
|
||||
// its key/value pairs is inserted into the current mapping, unless the key
|
||||
// already exists in it. If the value associated with the merge key is a
|
||||
// sequence, then this sequence is expected to contain mapping nodes and each
|
||||
// of these nodes is merged in turn according to its order in the sequence.
|
||||
// Keys in mapping nodes earlier in the sequence override keys specified in
|
||||
// later mapping nodes. -- http://yaml.org/type/merge.html
|
||||
function mergeToJSMap(ctx, map, value) {
|
||||
const source = ctx && isAlias(value) ? value.resolve(ctx.doc) : value;
|
||||
if (!isMap(source))
|
||||
throw new Error('Merge sources must be maps or map aliases');
|
||||
const srcMap = source.toJSON(null, ctx, Map);
|
||||
for (const [key, value] of srcMap) {
|
||||
if (map instanceof Map) {
|
||||
if (!map.has(key))
|
||||
map.set(key, value);
|
||||
}
|
||||
else if (map instanceof Set) {
|
||||
map.add(key);
|
||||
}
|
||||
else if (!Object.prototype.hasOwnProperty.call(map, key)) {
|
||||
Object.defineProperty(map, key, {
|
||||
value,
|
||||
writable: true,
|
||||
enumerable: true,
|
||||
configurable: true
|
||||
});
|
||||
}
|
||||
}
|
||||
return map;
|
||||
}
|
||||
function stringifyKey(key, jsKey, ctx) {
|
||||
if (jsKey === null)
|
||||
return '';
|
||||
if (typeof jsKey !== 'object')
|
||||
return String(jsKey);
|
||||
if (isNode(key) && ctx && ctx.doc) {
|
||||
const strCtx = createStringifyContext(ctx.doc, {});
|
||||
strCtx.anchors = new Set();
|
||||
for (const node of ctx.anchors.keys())
|
||||
strCtx.anchors.add(node.anchor);
|
||||
strCtx.inFlow = true;
|
||||
strCtx.inStringifyKey = true;
|
||||
const strKey = key.toString(strCtx);
|
||||
if (!ctx.mapKeyWarned) {
|
||||
let jsonStr = JSON.stringify(strKey);
|
||||
if (jsonStr.length > 40)
|
||||
jsonStr = jsonStr.substring(0, 36) + '..."';
|
||||
warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. Set mapAsMap: true to use object keys.`);
|
||||
ctx.mapKeyWarned = true;
|
||||
}
|
||||
return strKey;
|
||||
}
|
||||
return JSON.stringify(jsKey);
|
||||
}
|
||||
|
||||
export { addPairToJSMap };
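To see the merge-key handling above from the public API, a sketch assuming the top-level `parse` export and its `merge` schema option:

```js
import { parse } from 'yaml'

const src = `
base: &base { x: 1, y: 2 }
derived:
  <<: *base
  y: 3
`
// The '<<' pair goes through mergeToJSMap: the aliased map's entries are
// inserted unless already present. The explicit 'y: 3' pair is applied after
// the merge and overwrites the merged value.
parse(src, { merge: true })
// => { base: { x: 1, y: 2 }, derived: { x: 1, y: 3 } }
```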
|
37
node_modules/yaml/browser/dist/nodes/toJS.js
generated
vendored
Normal file
|
@@ -0,0 +1,37 @@
|
|||
import { hasAnchor } from './Node.js';
|
||||
|
||||
/**
|
||||
* Recursively convert any node or its contents to native JavaScript
|
||||
*
|
||||
* @param value - The input value
|
||||
* @param arg - If `value` defines a `toJSON()` method, use this
|
||||
* as its first argument
|
||||
* @param ctx - Conversion context, originally set in Document#toJS(). If
|
||||
* `{ keep: true }` is not set, output should be suitable for JSON
|
||||
* stringification.
|
||||
*/
|
||||
function toJS(value, arg, ctx) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-return
|
||||
if (Array.isArray(value))
|
||||
return value.map((v, i) => toJS(v, String(i), ctx));
|
||||
if (value && typeof value.toJSON === 'function') {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unsafe-call
|
||||
if (!ctx || !hasAnchor(value))
|
||||
return value.toJSON(arg, ctx);
|
||||
const data = { aliasCount: 0, count: 1, res: undefined };
|
||||
ctx.anchors.set(value, data);
|
||||
ctx.onCreate = res => {
|
||||
data.res = res;
|
||||
delete ctx.onCreate;
|
||||
};
|
||||
const res = value.toJSON(arg, ctx);
|
||||
if (ctx.onCreate)
|
||||
ctx.onCreate(res);
|
||||
return res;
|
||||
}
|
||||
if (typeof value === 'bigint' && !ctx?.keep)
|
||||
return Number(value);
|
||||
return value;
|
||||
}
|
||||
|
||||
export { toJS };
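The `keep` flag above is what separates `Document#toJS()` (which sets it) from `Document#toJSON()` (which does not); a sketch assuming `parseDocument` and the `intAsBigInt` parse option:

```js
import { parseDocument } from 'yaml'

const doc = parseDocument('big: 9007199254740993', { intAsBigInt: true })

doc.toJS()   // { big: 9007199254740993n }  – keep: the BigInt survives
doc.toJSON() // { big: 9007199254740992 }   – no keep: downgraded to a (lossy) number
```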
|
214
node_modules/yaml/browser/dist/parse/cst-scalar.js
generated
vendored
Normal file
|
@@ -0,0 +1,214 @@
|
|||
import { resolveBlockScalar } from '../compose/resolve-block-scalar.js';
|
||||
import { resolveFlowScalar } from '../compose/resolve-flow-scalar.js';
|
||||
import { YAMLParseError } from '../errors.js';
|
||||
import { stringifyString } from '../stringify/stringifyString.js';
|
||||
|
||||
function resolveAsScalar(token, strict = true, onError) {
|
||||
if (token) {
|
||||
const _onError = (pos, code, message) => {
|
||||
const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? pos[0] : pos.offset;
|
||||
if (onError)
|
||||
onError(offset, code, message);
|
||||
else
|
||||
throw new YAMLParseError([offset, offset + 1], code, message);
|
||||
};
|
||||
switch (token.type) {
|
||||
case 'scalar':
|
||||
case 'single-quoted-scalar':
|
||||
case 'double-quoted-scalar':
|
||||
return resolveFlowScalar(token, strict, _onError);
|
||||
case 'block-scalar':
|
||||
return resolveBlockScalar(token, strict, _onError);
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
/**
|
||||
* Create a new scalar token with `value`
|
||||
*
|
||||
* Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,
|
||||
* as this function does not support any schema operations and won't check for such conflicts.
|
||||
*
|
||||
* @param value The string representation of the value, which will have its content properly indented.
|
||||
* @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added.
|
||||
* @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.
|
||||
* @param context.indent The indent level of the token.
|
||||
* @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value.
|
||||
* @param context.offset The offset position of the token.
|
||||
* @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.
|
||||
*/
|
||||
function createScalarToken(value, context) {
|
||||
const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context;
|
||||
const source = stringifyString({ type, value }, {
|
||||
implicitKey,
|
||||
indent: indent > 0 ? ' '.repeat(indent) : '',
|
||||
inFlow,
|
||||
options: { blockQuote: true, lineWidth: -1 }
|
||||
});
|
||||
const end = context.end ?? [
|
||||
{ type: 'newline', offset: -1, indent, source: '\n' }
|
||||
];
|
||||
switch (source[0]) {
|
||||
case '|':
|
||||
case '>': {
|
||||
const he = source.indexOf('\n');
|
||||
const head = source.substring(0, he);
|
||||
const body = source.substring(he + 1) + '\n';
|
||||
const props = [
|
||||
{ type: 'block-scalar-header', offset, indent, source: head }
|
||||
];
|
||||
if (!addEndtoBlockProps(props, end))
|
||||
props.push({ type: 'newline', offset: -1, indent, source: '\n' });
|
||||
return { type: 'block-scalar', offset, indent, props, source: body };
|
||||
}
|
||||
case '"':
|
||||
return { type: 'double-quoted-scalar', offset, indent, source, end };
|
||||
case "'":
|
||||
return { type: 'single-quoted-scalar', offset, indent, source, end };
|
||||
default:
|
||||
return { type: 'scalar', offset, indent, source, end };
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have.
|
||||
*
|
||||
* Best efforts are made to retain any comments previously associated with the `token`,
|
||||
* though all contents within a collection's `items` will be overwritten.
|
||||
*
|
||||
* Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,
|
||||
* as this function does not support any schema operations and won't check for such conflicts.
|
||||
*
|
||||
* @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key.
|
||||
* @param value The string representation of the value, which will have its content properly indented.
|
||||
* @param context.afterKey In most cases, values after a key should have an additional level of indentation.
|
||||
* @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.
|
||||
* @param context.inFlow Being within a flow collection may affect the resolved type of the token's value.
|
||||
* @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.
|
||||
*/
|
||||
function setScalarValue(token, value, context = {}) {
|
||||
let { afterKey = false, implicitKey = false, inFlow = false, type } = context;
|
||||
let indent = 'indent' in token ? token.indent : null;
|
||||
if (afterKey && typeof indent === 'number')
|
||||
indent += 2;
|
||||
if (!type)
|
||||
switch (token.type) {
|
||||
case 'single-quoted-scalar':
|
||||
type = 'QUOTE_SINGLE';
|
||||
break;
|
||||
case 'double-quoted-scalar':
|
||||
type = 'QUOTE_DOUBLE';
|
||||
break;
|
||||
case 'block-scalar': {
|
||||
const header = token.props[0];
|
||||
if (header.type !== 'block-scalar-header')
|
||||
throw new Error('Invalid block scalar header');
|
||||
type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL';
|
||||
break;
|
||||
}
|
||||
default:
|
||||
type = 'PLAIN';
|
||||
}
|
||||
const source = stringifyString({ type, value }, {
|
||||
implicitKey: implicitKey || indent === null,
|
||||
indent: indent !== null && indent > 0 ? ' '.repeat(indent) : '',
|
||||
inFlow,
|
||||
options: { blockQuote: true, lineWidth: -1 }
|
||||
});
|
||||
switch (source[0]) {
|
||||
case '|':
|
||||
case '>':
|
||||
setBlockScalarValue(token, source);
|
||||
break;
|
||||
case '"':
|
||||
setFlowScalarValue(token, source, 'double-quoted-scalar');
|
||||
break;
|
||||
case "'":
|
||||
setFlowScalarValue(token, source, 'single-quoted-scalar');
|
||||
break;
|
||||
default:
|
||||
setFlowScalarValue(token, source, 'scalar');
|
||||
}
|
||||
}
|
||||
function setBlockScalarValue(token, source) {
|
||||
const he = source.indexOf('\n');
|
||||
const head = source.substring(0, he);
|
||||
const body = source.substring(he + 1) + '\n';
|
||||
if (token.type === 'block-scalar') {
|
||||
const header = token.props[0];
|
||||
if (header.type !== 'block-scalar-header')
|
||||
throw new Error('Invalid block scalar header');
|
||||
header.source = head;
|
||||
token.source = body;
|
||||
}
|
||||
else {
|
||||
const { offset } = token;
|
||||
const indent = 'indent' in token ? token.indent : -1;
|
||||
const props = [
|
||||
{ type: 'block-scalar-header', offset, indent, source: head }
|
||||
];
|
||||
if (!addEndtoBlockProps(props, 'end' in token ? token.end : undefined))
|
||||
props.push({ type: 'newline', offset: -1, indent, source: '\n' });
|
||||
for (const key of Object.keys(token))
|
||||
if (key !== 'type' && key !== 'offset')
|
||||
delete token[key];
|
||||
Object.assign(token, { type: 'block-scalar', indent, props, source: body });
|
||||
}
|
||||
}
|
||||
/** @returns `true` if last token is a newline */
|
||||
function addEndtoBlockProps(props, end) {
|
||||
if (end)
|
||||
for (const st of end)
|
||||
switch (st.type) {
|
||||
case 'space':
|
||||
case 'comment':
|
||||
props.push(st);
|
||||
break;
|
||||
case 'newline':
|
||||
props.push(st);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
function setFlowScalarValue(token, source, type) {
|
||||
switch (token.type) {
|
||||
case 'scalar':
|
||||
case 'double-quoted-scalar':
|
||||
case 'single-quoted-scalar':
|
||||
token.type = type;
|
||||
token.source = source;
|
||||
break;
|
||||
case 'block-scalar': {
|
||||
const end = token.props.slice(1);
|
||||
let oa = source.length;
|
||||
if (token.props[0].type === 'block-scalar-header')
|
||||
oa -= token.props[0].source.length;
|
||||
for (const tok of end)
|
||||
tok.offset += oa;
|
||||
delete token.props;
|
||||
Object.assign(token, { type, source, end });
|
||||
break;
|
||||
}
|
||||
case 'block-map':
|
||||
case 'block-seq': {
|
||||
const offset = token.offset + source.length;
|
||||
const nl = { type: 'newline', offset, indent: token.indent, source: '\n' };
|
||||
delete token.items;
|
||||
Object.assign(token, { type, source, end: [nl] });
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
const indent = 'indent' in token ? token.indent : -1;
|
||||
const end = 'end' in token && Array.isArray(token.end)
|
||||
? token.end.filter(st => st.type === 'space' ||
|
||||
st.type === 'comment' ||
|
||||
st.type === 'newline')
|
||||
: [];
|
||||
for (const key of Object.keys(token))
|
||||
if (key !== 'type' && key !== 'offset')
|
||||
delete token[key];
|
||||
Object.assign(token, { type, indent, source, end });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export { createScalarToken, resolveAsScalar, setScalarValue };
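These helpers are normally reached through the package's `CST` namespace (an assumption about the top-level exports); a short sketch:

```js
import { CST } from 'yaml'

// Build a fresh plain-scalar token and render it back to text.
const token = CST.createScalarToken('some value', { indent: 0 })
CST.stringify(token) // 'some value\n' – the default `end` supplies the newline

// Rewrite it in place; the requested type decides the quoting style.
CST.setScalarValue(token, 'needs: quoting', { type: 'QUOTE_SINGLE' })
CST.stringify(token) // "'needs: quoting'\n"
```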
|
61
node_modules/yaml/browser/dist/parse/cst-stringify.js
generated
vendored
Normal file
|
@@ -0,0 +1,61 @@
|
|||
/**
|
||||
* Stringify a CST document, token, or collection item
|
||||
*
|
||||
* Fair warning: This applies no validation whatsoever, and
|
||||
* simply concatenates the sources in their logical order.
|
||||
*/
|
||||
const stringify = (cst) => 'type' in cst ? stringifyToken(cst) : stringifyItem(cst);
|
||||
function stringifyToken(token) {
|
||||
switch (token.type) {
|
||||
case 'block-scalar': {
|
||||
let res = '';
|
||||
for (const tok of token.props)
|
||||
res += stringifyToken(tok);
|
||||
return res + token.source;
|
||||
}
|
||||
case 'block-map':
|
||||
case 'block-seq': {
|
||||
let res = '';
|
||||
for (const item of token.items)
|
||||
res += stringifyItem(item);
|
||||
return res;
|
||||
}
|
||||
case 'flow-collection': {
|
||||
let res = token.start.source;
|
||||
for (const item of token.items)
|
||||
res += stringifyItem(item);
|
||||
for (const st of token.end)
|
||||
res += st.source;
|
||||
return res;
|
||||
}
|
||||
case 'document': {
|
||||
let res = stringifyItem(token);
|
||||
if (token.end)
|
||||
for (const st of token.end)
|
||||
res += st.source;
|
||||
return res;
|
||||
}
|
||||
default: {
|
||||
let res = token.source;
|
||||
if ('end' in token && token.end)
|
||||
for (const st of token.end)
|
||||
res += st.source;
|
||||
return res;
|
||||
}
|
||||
}
|
||||
}
|
||||
function stringifyItem({ start, key, sep, value }) {
|
||||
let res = '';
|
||||
for (const st of start)
|
||||
res += st.source;
|
||||
if (key)
|
||||
res += stringifyToken(key);
|
||||
if (sep)
|
||||
for (const st of sep)
|
||||
res += st.source;
|
||||
if (value)
|
||||
res += stringifyToken(value);
|
||||
return res;
|
||||
}
|
||||
|
||||
export { stringify };
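Because each token keeps the exact source it was lexed from, this concatenation round-trips the input; a sketch assuming `Parser` and `CST` are exported from the package root:

```js
import { Parser, CST } from 'yaml'

const src = 'key: [1, 2] # inline comment\n'
let out = ''
for (const token of new Parser().parse(src))
  out += CST.stringify(token)
out === src // true – no normalisation, just the original slices glued back together
```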
|
97
node_modules/yaml/browser/dist/parse/cst-visit.js
generated
vendored
Normal file
|
@@ -0,0 +1,97 @@
|
|||
const BREAK = Symbol('break visit');
|
||||
const SKIP = Symbol('skip children');
|
||||
const REMOVE = Symbol('remove item');
|
||||
/**
|
||||
* Apply a visitor to a CST document or item.
|
||||
*
|
||||
* Walks through the tree (depth-first) starting from the root, calling a
|
||||
* `visitor` function with two arguments when entering each item:
|
||||
* - `item`: The current item, which includes the following members:
|
||||
* - `start: SourceToken[]` – Source tokens before the key or value,
|
||||
* possibly including its anchor or tag.
|
||||
* - `key?: Token | null` – Set for pair values. May then be `null`, if
|
||||
* the key before the `:` separator is empty.
|
||||
* - `sep?: SourceToken[]` – Source tokens between the key and the value,
|
||||
* which should include the `:` map value indicator if `value` is set.
|
||||
* - `value?: Token` – The value of a sequence item, or of a map pair.
|
||||
* - `path`: The steps from the root to the current node, as an array of
|
||||
* `['key' | 'value', number]` tuples.
|
||||
*
|
||||
* The return value of the visitor may be used to control the traversal:
|
||||
* - `undefined` (default): Do nothing and continue
|
||||
* - `visit.SKIP`: Do not visit the children of this token, continue with
|
||||
* next sibling
|
||||
* - `visit.BREAK`: Terminate traversal completely
|
||||
* - `visit.REMOVE`: Remove the current item, then continue with the next one
|
||||
* - `number`: Set the index of the next step. This is useful especially if
|
||||
* the index of the current token has changed.
|
||||
* - `function`: Define the next visitor for this item. After the original
|
||||
* visitor is called on item entry, next visitors are called after handling
|
||||
* a non-empty `key` and when exiting the item.
|
||||
*/
|
||||
function visit(cst, visitor) {
|
||||
if ('type' in cst && cst.type === 'document')
|
||||
cst = { start: cst.start, value: cst.value };
|
||||
_visit(Object.freeze([]), cst, visitor);
|
||||
}
|
||||
// Without the `as symbol` casts, TS declares these in the `visit`
|
||||
// namespace using `var`, but then complains about that because
|
||||
// `unique symbol` must be `const`.
|
||||
/** Terminate visit traversal completely */
|
||||
visit.BREAK = BREAK;
|
||||
/** Do not visit the children of the current item */
|
||||
visit.SKIP = SKIP;
|
||||
/** Remove the current item */
|
||||
visit.REMOVE = REMOVE;
|
||||
/** Find the item at `path` from `cst` as the root */
|
||||
visit.itemAtPath = (cst, path) => {
|
||||
let item = cst;
|
||||
for (const [field, index] of path) {
|
||||
const tok = item?.[field];
|
||||
if (tok && 'items' in tok) {
|
||||
item = tok.items[index];
|
||||
}
|
||||
else
|
||||
return undefined;
|
||||
}
|
||||
return item;
|
||||
};
|
||||
/**
|
||||
* Get the immediate parent collection of the item at `path` from `cst` as the root.
|
||||
*
|
||||
* Throws an error if the collection is not found, which should never happen if the item itself exists.
|
||||
*/
|
||||
visit.parentCollection = (cst, path) => {
|
||||
const parent = visit.itemAtPath(cst, path.slice(0, -1));
|
||||
const field = path[path.length - 1][0];
|
||||
const coll = parent?.[field];
|
||||
if (coll && 'items' in coll)
|
||||
return coll;
|
||||
throw new Error('Parent collection not found');
|
||||
};
|
||||
function _visit(path, item, visitor) {
|
||||
let ctrl = visitor(item, path);
|
||||
if (typeof ctrl === 'symbol')
|
||||
return ctrl;
|
||||
for (const field of ['key', 'value']) {
|
||||
const token = item[field];
|
||||
if (token && 'items' in token) {
|
||||
for (let i = 0; i < token.items.length; ++i) {
|
||||
const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor);
|
||||
if (typeof ci === 'number')
|
||||
i = ci - 1;
|
||||
else if (ci === BREAK)
|
||||
return BREAK;
|
||||
else if (ci === REMOVE) {
|
||||
token.items.splice(i, 1);
|
||||
i -= 1;
|
||||
}
|
||||
}
|
||||
if (typeof ctrl === 'function' && field === 'key')
|
||||
ctrl = ctrl(item, path);
|
||||
}
|
||||
}
|
||||
return typeof ctrl === 'function' ? ctrl(item, path) : ctrl;
|
||||
}
|
||||
|
||||
export { visit };
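A sketch of walking a parsed CST with this visitor, assuming `Parser` and the `CST` namespace are available from the package root:

```js
import { Parser, CST } from 'yaml'

const [doc] = new Parser().parse('a: 1\nb: [2, 3]\n')

CST.visit(doc, (item, path) => {
  if (item.key && CST.isScalar(item.key))
    console.log(path, item.key.source) // [['value', 0]] 'a', then [['value', 1]] 'b'
  // Return CST.visit.SKIP to ignore an item's children,
  // or CST.visit.BREAK to stop the walk entirely.
})
```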
|
98
node_modules/yaml/browser/dist/parse/cst.js
generated
vendored
Normal file
|
@@ -0,0 +1,98 @@
|
|||
export { createScalarToken, resolveAsScalar, setScalarValue } from './cst-scalar.js';
|
||||
export { stringify } from './cst-stringify.js';
|
||||
export { visit } from './cst-visit.js';
|
||||
|
||||
/** The byte order mark */
|
||||
const BOM = '\u{FEFF}';
|
||||
/** Start of doc-mode */
|
||||
const DOCUMENT = '\x02'; // C0: Start of Text
|
||||
/** Unexpected end of flow-mode */
|
||||
const FLOW_END = '\x18'; // C0: Cancel
|
||||
/** Next token is a scalar value */
|
||||
const SCALAR = '\x1f'; // C0: Unit Separator
|
||||
/** @returns `true` if `token` is a flow or block collection */
|
||||
const isCollection = (token) => !!token && 'items' in token;
|
||||
/** @returns `true` if `token` is a flow or block scalar; not an alias */
|
||||
const isScalar = (token) => !!token &&
|
||||
(token.type === 'scalar' ||
|
||||
token.type === 'single-quoted-scalar' ||
|
||||
token.type === 'double-quoted-scalar' ||
|
||||
token.type === 'block-scalar');
|
||||
/* istanbul ignore next */
|
||||
/** Get a printable representation of a lexer token */
|
||||
function prettyToken(token) {
|
||||
switch (token) {
|
||||
case BOM:
|
||||
return '<BOM>';
|
||||
case DOCUMENT:
|
||||
return '<DOC>';
|
||||
case FLOW_END:
|
||||
return '<FLOW_END>';
|
||||
case SCALAR:
|
||||
return '<SCALAR>';
|
||||
default:
|
||||
return JSON.stringify(token);
|
||||
}
|
||||
}
|
||||
/** Identify the type of a lexer token. May return `null` for unknown tokens. */
|
||||
function tokenType(source) {
|
||||
switch (source) {
|
||||
case BOM:
|
||||
return 'byte-order-mark';
|
||||
case DOCUMENT:
|
||||
return 'doc-mode';
|
||||
case FLOW_END:
|
||||
return 'flow-error-end';
|
||||
case SCALAR:
|
||||
return 'scalar';
|
||||
case '---':
|
||||
return 'doc-start';
|
||||
case '...':
|
||||
return 'doc-end';
|
||||
case '':
|
||||
case '\n':
|
||||
case '\r\n':
|
||||
return 'newline';
|
||||
case '-':
|
||||
return 'seq-item-ind';
|
||||
case '?':
|
||||
return 'explicit-key-ind';
|
||||
case ':':
|
||||
return 'map-value-ind';
|
||||
case '{':
|
||||
return 'flow-map-start';
|
||||
case '}':
|
||||
return 'flow-map-end';
|
||||
case '[':
|
||||
return 'flow-seq-start';
|
||||
case ']':
|
||||
return 'flow-seq-end';
|
||||
case ',':
|
||||
return 'comma';
|
||||
}
|
||||
switch (source[0]) {
|
||||
case ' ':
|
||||
case '\t':
|
||||
return 'space';
|
||||
case '#':
|
||||
return 'comment';
|
||||
case '%':
|
||||
return 'directive-line';
|
||||
case '*':
|
||||
return 'alias';
|
||||
case '&':
|
||||
return 'anchor';
|
||||
case '!':
|
||||
return 'tag';
|
||||
case "'":
|
||||
return 'single-quoted-scalar';
|
||||
case '"':
|
||||
return 'double-quoted-scalar';
|
||||
case '|':
|
||||
case '>':
|
||||
return 'block-scalar-header';
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
export { BOM, DOCUMENT, FLOW_END, SCALAR, isCollection, isScalar, prettyToken, tokenType };
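`prettyToken` and `tokenType` are mainly useful for inspecting raw lexer output; a sketch (again assuming the top-level `Lexer` and `CST` exports):

```js
import { Lexer, CST } from 'yaml'

for (const lexeme of new Lexer().lex('key: value\n'))
  console.log(CST.prettyToken(lexeme), CST.tokenType(lexeme))
// <DOC>     doc-mode
// <SCALAR>  scalar
// "key"     null          – the scalar text itself; its type came from the marker before it
// ":"       map-value-ind
// " "       space
// <SCALAR>  scalar
// "value"   null
// "\n"      newline
```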
|
701
node_modules/yaml/browser/dist/parse/lexer.js
generated
vendored
Normal file
|
@@ -0,0 +1,701 @@
|
|||
import { BOM, DOCUMENT, FLOW_END, SCALAR } from './cst.js';
|
||||
|
||||
/*
|
||||
START -> stream
|
||||
|
||||
stream
|
||||
directive -> line-end -> stream
|
||||
indent + line-end -> stream
|
||||
[else] -> line-start
|
||||
|
||||
line-end
|
||||
comment -> line-end
|
||||
newline -> .
|
||||
input-end -> END
|
||||
|
||||
line-start
|
||||
doc-start -> doc
|
||||
doc-end -> stream
|
||||
[else] -> indent -> block-start
|
||||
|
||||
block-start
|
||||
seq-item-start -> block-start
|
||||
explicit-key-start -> block-start
|
||||
map-value-start -> block-start
|
||||
[else] -> doc
|
||||
|
||||
doc
|
||||
line-end -> line-start
|
||||
spaces -> doc
|
||||
anchor -> doc
|
||||
tag -> doc
|
||||
flow-start -> flow -> doc
|
||||
flow-end -> error -> doc
|
||||
seq-item-start -> error -> doc
|
||||
explicit-key-start -> error -> doc
|
||||
map-value-start -> doc
|
||||
alias -> doc
|
||||
quote-start -> quoted-scalar -> doc
|
||||
block-scalar-header -> line-end -> block-scalar(min) -> line-start
|
||||
[else] -> plain-scalar(false, min) -> doc
|
||||
|
||||
flow
|
||||
line-end -> flow
|
||||
spaces -> flow
|
||||
anchor -> flow
|
||||
tag -> flow
|
||||
flow-start -> flow -> flow
|
||||
flow-end -> .
|
||||
seq-item-start -> error -> flow
|
||||
explicit-key-start -> flow
|
||||
map-value-start -> flow
|
||||
alias -> flow
|
||||
quote-start -> quoted-scalar -> flow
|
||||
comma -> flow
|
||||
[else] -> plain-scalar(true, 0) -> flow
|
||||
|
||||
quoted-scalar
|
||||
quote-end -> .
|
||||
[else] -> quoted-scalar
|
||||
|
||||
block-scalar(min)
|
||||
newline + peek(indent < min) -> .
|
||||
[else] -> block-scalar(min)
|
||||
|
||||
plain-scalar(is-flow, min)
|
||||
scalar-end(is-flow) -> .
|
||||
peek(newline + (indent < min)) -> .
|
||||
[else] -> plain-scalar(min)
|
||||
*/
|
||||
function isEmpty(ch) {
|
||||
switch (ch) {
|
||||
case undefined:
|
||||
case ' ':
|
||||
case '\n':
|
||||
case '\r':
|
||||
case '\t':
|
||||
return true;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
const hexDigits = '0123456789ABCDEFabcdef'.split('');
|
||||
const tagChars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()".split('');
|
||||
const invalidFlowScalarChars = ',[]{}'.split('');
|
||||
const invalidAnchorChars = ' ,[]{}\n\r\t'.split('');
|
||||
const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch);
|
||||
/**
|
||||
* Splits an input string into lexical tokens, i.e. smaller strings that are
|
||||
* easily identifiable by `tokens.tokenType()`.
|
||||
*
|
||||
* Lexing always starts in a "stream" context. Incomplete input may be buffered
|
||||
* until a complete token can be emitted.
|
||||
*
|
||||
* In addition to slices of the original input, the following control characters
|
||||
* may also be emitted:
|
||||
*
|
||||
* - `\x02` (Start of Text): A document starts with the next token
|
||||
* - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error)
|
||||
* - `\x1f` (Unit Separator): Next token is a scalar value
|
||||
* - `\u{FEFF}` (Byte order mark): Emitted separately outside documents
|
||||
*/
|
||||
class Lexer {
|
||||
constructor() {
|
||||
/**
|
||||
* Flag indicating whether the end of the current buffer marks the end of
|
||||
* all input
|
||||
*/
|
||||
this.atEnd = false;
|
||||
/**
|
||||
* Explicit indent set in block scalar header, as an offset from the current
|
||||
* minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not
|
||||
* explicitly set.
|
||||
*/
|
||||
this.blockScalarIndent = -1;
|
||||
/**
|
||||
* Block scalars that include a + (keep) chomping indicator in their header
|
||||
* include trailing empty lines, which are otherwise excluded from the
|
||||
* scalar's contents.
|
||||
*/
|
||||
this.blockScalarKeep = false;
|
||||
/** Current input */
|
||||
this.buffer = '';
|
||||
/**
|
||||
* Flag noting whether the map value indicator : can immediately follow this
|
||||
* node within a flow context.
|
||||
*/
|
||||
this.flowKey = false;
|
||||
/** Count of surrounding flow collection levels. */
|
||||
this.flowLevel = 0;
|
||||
/**
|
||||
* Minimum level of indentation required for next lines to be parsed as a
|
||||
* part of the current scalar value.
|
||||
*/
|
||||
this.indentNext = 0;
|
||||
/** Indentation level of the current line. */
|
||||
this.indentValue = 0;
|
||||
/** Position of the next \n character. */
|
||||
this.lineEndPos = null;
|
||||
/** Stores the state of the lexer when reaching the end of incomplete input */
|
||||
this.next = null;
|
||||
/** A pointer to `buffer`; the current position of the lexer. */
|
||||
this.pos = 0;
|
||||
}
|
||||
/**
|
||||
* Generate YAML tokens from the `source` string. If `incomplete`,
|
||||
* a part of the last line may be left as a buffer for the next call.
|
||||
*
|
||||
* @returns A generator of lexical tokens
|
||||
*/
|
||||
*lex(source, incomplete = false) {
|
||||
if (source) {
|
||||
this.buffer = this.buffer ? this.buffer + source : source;
|
||||
this.lineEndPos = null;
|
||||
}
|
||||
this.atEnd = !incomplete;
|
||||
let next = this.next ?? 'stream';
|
||||
while (next && (incomplete || this.hasChars(1)))
|
||||
next = yield* this.parseNext(next);
|
||||
}
|
||||
atLineEnd() {
|
||||
let i = this.pos;
|
||||
let ch = this.buffer[i];
|
||||
while (ch === ' ' || ch === '\t')
|
||||
ch = this.buffer[++i];
|
||||
if (!ch || ch === '#' || ch === '\n')
|
||||
return true;
|
||||
if (ch === '\r')
|
||||
return this.buffer[i + 1] === '\n';
|
||||
return false;
|
||||
}
|
||||
charAt(n) {
|
||||
return this.buffer[this.pos + n];
|
||||
}
|
||||
continueScalar(offset) {
|
||||
let ch = this.buffer[offset];
|
||||
if (this.indentNext > 0) {
|
||||
let indent = 0;
|
||||
while (ch === ' ')
|
||||
ch = this.buffer[++indent + offset];
|
||||
if (ch === '\r') {
|
||||
const next = this.buffer[indent + offset + 1];
|
||||
if (next === '\n' || (!next && !this.atEnd))
|
||||
return offset + indent + 1;
|
||||
}
|
||||
return ch === '\n' || indent >= this.indentNext || (!ch && !this.atEnd)
|
||||
? offset + indent
|
||||
: -1;
|
||||
}
|
||||
if (ch === '-' || ch === '.') {
|
||||
const dt = this.buffer.substr(offset, 3);
|
||||
if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3]))
|
||||
return -1;
|
||||
}
|
||||
return offset;
|
||||
}
|
||||
getLine() {
|
||||
let end = this.lineEndPos;
|
||||
if (typeof end !== 'number' || (end !== -1 && end < this.pos)) {
|
||||
end = this.buffer.indexOf('\n', this.pos);
|
||||
this.lineEndPos = end;
|
||||
}
|
||||
if (end === -1)
|
||||
return this.atEnd ? this.buffer.substring(this.pos) : null;
|
||||
if (this.buffer[end - 1] === '\r')
|
||||
end -= 1;
|
||||
return this.buffer.substring(this.pos, end);
|
||||
}
|
||||
hasChars(n) {
|
||||
return this.pos + n <= this.buffer.length;
|
||||
}
|
||||
setNext(state) {
|
||||
this.buffer = this.buffer.substring(this.pos);
|
||||
this.pos = 0;
|
||||
this.lineEndPos = null;
|
||||
this.next = state;
|
||||
return null;
|
||||
}
|
||||
peek(n) {
|
||||
return this.buffer.substr(this.pos, n);
|
||||
}
|
||||
*parseNext(next) {
|
||||
switch (next) {
|
||||
case 'stream':
|
||||
return yield* this.parseStream();
|
||||
case 'line-start':
|
||||
return yield* this.parseLineStart();
|
||||
case 'block-start':
|
||||
return yield* this.parseBlockStart();
|
||||
case 'doc':
|
||||
return yield* this.parseDocument();
|
||||
case 'flow':
|
||||
return yield* this.parseFlowCollection();
|
||||
case 'quoted-scalar':
|
||||
return yield* this.parseQuotedScalar();
|
||||
case 'block-scalar':
|
||||
return yield* this.parseBlockScalar();
|
||||
case 'plain-scalar':
|
||||
return yield* this.parsePlainScalar();
|
||||
}
|
||||
}
|
||||
*parseStream() {
|
||||
let line = this.getLine();
|
||||
if (line === null)
|
||||
return this.setNext('stream');
|
||||
if (line[0] === BOM) {
|
||||
yield* this.pushCount(1);
|
||||
line = line.substring(1);
|
||||
}
|
||||
if (line[0] === '%') {
|
||||
let dirEnd = line.length;
|
||||
const cs = line.indexOf('#');
|
||||
if (cs !== -1) {
|
||||
const ch = line[cs - 1];
|
||||
if (ch === ' ' || ch === '\t')
|
||||
dirEnd = cs - 1;
|
||||
}
|
||||
while (true) {
|
||||
const ch = line[dirEnd - 1];
|
||||
if (ch === ' ' || ch === '\t')
|
||||
dirEnd -= 1;
|
||||
else
|
||||
break;
|
||||
}
|
||||
const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true));
|
||||
yield* this.pushCount(line.length - n); // possible comment
|
||||
this.pushNewline();
|
||||
return 'stream';
|
||||
}
|
||||
if (this.atLineEnd()) {
|
||||
const sp = yield* this.pushSpaces(true);
|
||||
yield* this.pushCount(line.length - sp);
|
||||
yield* this.pushNewline();
|
||||
return 'stream';
|
||||
}
|
||||
yield DOCUMENT;
|
||||
return yield* this.parseLineStart();
|
||||
}
|
||||
*parseLineStart() {
|
||||
const ch = this.charAt(0);
|
||||
if (!ch && !this.atEnd)
|
||||
return this.setNext('line-start');
|
||||
if (ch === '-' || ch === '.') {
|
||||
if (!this.atEnd && !this.hasChars(4))
|
||||
return this.setNext('line-start');
|
||||
const s = this.peek(3);
|
||||
if (s === '---' && isEmpty(this.charAt(3))) {
|
||||
yield* this.pushCount(3);
|
||||
this.indentValue = 0;
|
||||
this.indentNext = 0;
|
||||
return 'doc';
|
||||
}
|
||||
else if (s === '...' && isEmpty(this.charAt(3))) {
|
||||
yield* this.pushCount(3);
|
||||
return 'stream';
|
||||
}
|
||||
}
|
||||
this.indentValue = yield* this.pushSpaces(false);
|
||||
if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1)))
|
||||
this.indentNext = this.indentValue;
|
||||
return yield* this.parseBlockStart();
|
||||
}
|
||||
*parseBlockStart() {
|
||||
const [ch0, ch1] = this.peek(2);
|
||||
if (!ch1 && !this.atEnd)
|
||||
return this.setNext('block-start');
|
||||
if ((ch0 === '-' || ch0 === '?' || ch0 === ':') && isEmpty(ch1)) {
|
||||
const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true));
|
||||
this.indentNext = this.indentValue + 1;
|
||||
this.indentValue += n;
|
||||
return yield* this.parseBlockStart();
|
||||
}
|
||||
return 'doc';
|
||||
}
|
||||
*parseDocument() {
|
||||
yield* this.pushSpaces(true);
|
||||
const line = this.getLine();
|
||||
if (line === null)
|
||||
return this.setNext('doc');
|
||||
let n = yield* this.pushIndicators();
|
||||
switch (line[n]) {
|
||||
case '#':
|
||||
yield* this.pushCount(line.length - n);
|
||||
// fallthrough
|
||||
case undefined:
|
||||
yield* this.pushNewline();
|
||||
return yield* this.parseLineStart();
|
||||
case '{':
|
||||
case '[':
|
||||
yield* this.pushCount(1);
|
||||
this.flowKey = false;
|
||||
this.flowLevel = 1;
|
||||
return 'flow';
|
||||
case '}':
|
||||
case ']':
|
||||
// this is an error
|
||||
yield* this.pushCount(1);
|
||||
return 'doc';
|
||||
case '*':
|
||||
yield* this.pushUntil(isNotAnchorChar);
|
||||
return 'doc';
|
||||
case '"':
|
||||
case "'":
|
||||
return yield* this.parseQuotedScalar();
|
||||
case '|':
|
||||
case '>':
|
||||
n += yield* this.parseBlockScalarHeader();
|
||||
n += yield* this.pushSpaces(true);
|
||||
yield* this.pushCount(line.length - n);
|
||||
yield* this.pushNewline();
|
||||
return yield* this.parseBlockScalar();
|
||||
default:
|
||||
return yield* this.parsePlainScalar();
|
||||
}
|
||||
}
|
||||
*parseFlowCollection() {
|
||||
let nl, sp;
|
||||
let indent = -1;
|
||||
do {
|
||||
nl = yield* this.pushNewline();
|
||||
if (nl > 0) {
|
||||
sp = yield* this.pushSpaces(false);
|
||||
this.indentValue = indent = sp;
|
||||
}
|
||||
else {
|
||||
sp = 0;
|
||||
}
|
||||
sp += yield* this.pushSpaces(true);
|
||||
} while (nl + sp > 0);
|
||||
const line = this.getLine();
|
||||
if (line === null)
|
||||
return this.setNext('flow');
|
||||
if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') ||
|
||||
(indent === 0 &&
|
||||
(line.startsWith('---') || line.startsWith('...')) &&
|
||||
isEmpty(line[3]))) {
|
||||
// Allowing for the terminal ] or } at the same (rather than greater)
|
||||
// indent level as the initial [ or { is technically invalid, but
|
||||
// failing here would be surprising to users.
|
||||
const atFlowEndMarker = indent === this.indentNext - 1 &&
|
||||
this.flowLevel === 1 &&
|
||||
(line[0] === ']' || line[0] === '}');
|
||||
if (!atFlowEndMarker) {
|
||||
// this is an error
|
||||
this.flowLevel = 0;
|
||||
yield FLOW_END;
|
||||
return yield* this.parseLineStart();
|
||||
}
|
||||
}
|
||||
let n = 0;
|
||||
while (line[n] === ',') {
|
||||
n += yield* this.pushCount(1);
|
||||
n += yield* this.pushSpaces(true);
|
||||
this.flowKey = false;
|
||||
}
|
||||
n += yield* this.pushIndicators();
|
||||
switch (line[n]) {
|
||||
case undefined:
|
||||
return 'flow';
|
||||
case '#':
|
||||
yield* this.pushCount(line.length - n);
|
||||
return 'flow';
|
||||
case '{':
|
||||
case '[':
|
||||
yield* this.pushCount(1);
|
||||
this.flowKey = false;
|
||||
this.flowLevel += 1;
|
||||
return 'flow';
|
||||
case '}':
|
||||
case ']':
|
||||
yield* this.pushCount(1);
|
||||
this.flowKey = true;
|
||||
this.flowLevel -= 1;
|
||||
return this.flowLevel ? 'flow' : 'doc';
|
||||
case '*':
|
||||
yield* this.pushUntil(isNotAnchorChar);
|
||||
return 'flow';
|
||||
case '"':
|
||||
case "'":
|
||||
this.flowKey = true;
|
||||
return yield* this.parseQuotedScalar();
|
||||
case ':': {
|
||||
const next = this.charAt(1);
|
||||
if (this.flowKey || isEmpty(next) || next === ',') {
|
||||
this.flowKey = false;
|
||||
yield* this.pushCount(1);
|
||||
yield* this.pushSpaces(true);
|
||||
return 'flow';
|
||||
}
|
||||
}
|
||||
// fallthrough
|
||||
default:
|
||||
this.flowKey = false;
|
||||
return yield* this.parsePlainScalar();
|
||||
}
|
||||
}
|
||||
*parseQuotedScalar() {
|
||||
const quote = this.charAt(0);
|
||||
let end = this.buffer.indexOf(quote, this.pos + 1);
|
||||
if (quote === "'") {
|
||||
while (end !== -1 && this.buffer[end + 1] === "'")
|
||||
end = this.buffer.indexOf("'", end + 2);
|
||||
}
|
||||
else {
|
||||
// double-quote
|
||||
while (end !== -1) {
|
||||
let n = 0;
|
||||
while (this.buffer[end - 1 - n] === '\\')
|
||||
n += 1;
|
||||
if (n % 2 === 0)
|
||||
break;
|
||||
end = this.buffer.indexOf('"', end + 1);
|
||||
}
|
||||
}
|
||||
// Only looking for newlines within the quotes
|
||||
const qb = this.buffer.substring(0, end);
|
||||
let nl = qb.indexOf('\n', this.pos);
|
||||
if (nl !== -1) {
|
||||
while (nl !== -1) {
|
||||
const cs = this.continueScalar(nl + 1);
|
||||
if (cs === -1)
|
||||
break;
|
||||
nl = qb.indexOf('\n', cs);
|
||||
}
|
||||
if (nl !== -1) {
|
||||
// this is an error caused by an unexpected unindent
|
||||
end = nl - (qb[nl - 1] === '\r' ? 2 : 1);
|
||||
}
|
||||
}
|
||||
if (end === -1) {
|
||||
if (!this.atEnd)
|
||||
return this.setNext('quoted-scalar');
|
||||
end = this.buffer.length;
|
||||
}
|
||||
yield* this.pushToIndex(end + 1, false);
|
||||
return this.flowLevel ? 'flow' : 'doc';
|
||||
}
|
||||
*parseBlockScalarHeader() {
|
||||
this.blockScalarIndent = -1;
|
||||
this.blockScalarKeep = false;
|
||||
let i = this.pos;
|
||||
while (true) {
|
||||
const ch = this.buffer[++i];
|
||||
if (ch === '+')
|
||||
this.blockScalarKeep = true;
|
||||
else if (ch > '0' && ch <= '9')
|
||||
this.blockScalarIndent = Number(ch) - 1;
|
||||
else if (ch !== '-')
|
||||
break;
|
||||
}
|
||||
return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#');
|
||||
}
|
||||
*parseBlockScalar() {
|
||||
let nl = this.pos - 1; // may be -1 if this.pos === 0
|
||||
let indent = 0;
|
||||
let ch;
|
||||
loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) {
|
||||
switch (ch) {
|
||||
case ' ':
|
||||
indent += 1;
|
||||
break;
|
||||
case '\n':
|
||||
nl = i;
|
||||
indent = 0;
|
||||
break;
|
||||
case '\r': {
|
||||
const next = this.buffer[i + 1];
|
||||
if (!next && !this.atEnd)
|
||||
return this.setNext('block-scalar');
|
||||
if (next === '\n')
|
||||
break;
|
||||
} // fallthrough
|
||||
default:
|
||||
break loop;
|
||||
}
|
||||
}
|
||||
if (!ch && !this.atEnd)
|
||||
return this.setNext('block-scalar');
|
||||
if (indent >= this.indentNext) {
|
||||
if (this.blockScalarIndent === -1)
|
||||
this.indentNext = indent;
|
||||
else
|
||||
this.indentNext += this.blockScalarIndent;
|
||||
do {
|
||||
const cs = this.continueScalar(nl + 1);
|
||||
if (cs === -1)
|
||||
break;
|
||||
nl = this.buffer.indexOf('\n', cs);
|
||||
} while (nl !== -1);
|
||||
if (nl === -1) {
|
||||
if (!this.atEnd)
|
||||
return this.setNext('block-scalar');
|
||||
nl = this.buffer.length;
|
||||
}
|
||||
}
|
||||
if (!this.blockScalarKeep) {
|
||||
do {
|
||||
let i = nl - 1;
|
||||
let ch = this.buffer[i];
|
||||
if (ch === '\r')
|
||||
ch = this.buffer[--i];
|
||||
const lastChar = i; // Drop the line if the last char is not more indented
|
||||
while (ch === ' ' || ch === '\t')
|
||||
ch = this.buffer[--i];
|
||||
if (ch === '\n' && i >= this.pos && i + 1 + indent > lastChar)
|
||||
nl = i;
|
||||
else
|
||||
break;
|
||||
} while (true);
|
||||
}
|
||||
yield SCALAR;
|
||||
yield* this.pushToIndex(nl + 1, true);
|
||||
return yield* this.parseLineStart();
|
||||
}
|
||||
*parsePlainScalar() {
|
||||
const inFlow = this.flowLevel > 0;
|
||||
let end = this.pos - 1;
|
||||
let i = this.pos - 1;
|
||||
let ch;
|
||||
while ((ch = this.buffer[++i])) {
|
||||
if (ch === ':') {
|
||||
const next = this.buffer[i + 1];
|
||||
if (isEmpty(next) || (inFlow && next === ','))
|
||||
break;
|
||||
end = i;
|
||||
}
|
||||
else if (isEmpty(ch)) {
|
||||
let next = this.buffer[i + 1];
|
||||
if (ch === '\r') {
|
||||
if (next === '\n') {
|
||||
i += 1;
|
||||
ch = '\n';
|
||||
next = this.buffer[i + 1];
|
||||
}
|
||||
else
|
||||
end = i;
|
||||
}
|
||||
if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next)))
|
||||
break;
|
||||
if (ch === '\n') {
|
||||
const cs = this.continueScalar(i + 1);
|
||||
if (cs === -1)
|
||||
break;
|
||||
i = Math.max(i, cs - 2); // to advance, but still account for ' #'
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (inFlow && invalidFlowScalarChars.includes(ch))
|
||||
break;
|
||||
end = i;
|
||||
}
|
||||
}
|
||||
if (!ch && !this.atEnd)
|
||||
return this.setNext('plain-scalar');
|
||||
yield SCALAR;
|
||||
yield* this.pushToIndex(end + 1, true);
|
||||
return inFlow ? 'flow' : 'doc';
|
||||
}
|
||||
*pushCount(n) {
|
||||
if (n > 0) {
|
||||
yield this.buffer.substr(this.pos, n);
|
||||
this.pos += n;
|
||||
return n;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
*pushToIndex(i, allowEmpty) {
|
||||
const s = this.buffer.slice(this.pos, i);
|
||||
if (s) {
|
||||
yield s;
|
||||
this.pos += s.length;
|
||||
return s.length;
|
||||
}
|
||||
else if (allowEmpty)
|
||||
yield '';
|
||||
return 0;
|
||||
}
|
||||
*pushIndicators() {
|
||||
switch (this.charAt(0)) {
|
||||
case '!':
|
||||
return ((yield* this.pushTag()) +
|
||||
(yield* this.pushSpaces(true)) +
|
||||
(yield* this.pushIndicators()));
|
||||
case '&':
|
||||
return ((yield* this.pushUntil(isNotAnchorChar)) +
|
||||
(yield* this.pushSpaces(true)) +
|
||||
(yield* this.pushIndicators()));
|
||||
case '-': // this is an error
|
||||
case '?': // this is an error outside flow collections
|
||||
case ':': {
|
||||
const inFlow = this.flowLevel > 0;
|
||||
const ch1 = this.charAt(1);
|
||||
if (isEmpty(ch1) || (inFlow && invalidFlowScalarChars.includes(ch1))) {
|
||||
if (!inFlow)
|
||||
this.indentNext = this.indentValue + 1;
|
||||
else if (this.flowKey)
|
||||
this.flowKey = false;
|
||||
return ((yield* this.pushCount(1)) +
|
||||
(yield* this.pushSpaces(true)) +
|
||||
(yield* this.pushIndicators()));
|
||||
}
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
*pushTag() {
|
||||
if (this.charAt(1) === '<') {
|
||||
let i = this.pos + 2;
|
||||
let ch = this.buffer[i];
|
||||
while (!isEmpty(ch) && ch !== '>')
|
||||
ch = this.buffer[++i];
|
||||
return yield* this.pushToIndex(ch === '>' ? i + 1 : i, false);
|
||||
}
|
||||
else {
|
||||
let i = this.pos + 1;
|
||||
let ch = this.buffer[i];
|
||||
while (ch) {
|
||||
if (tagChars.includes(ch))
|
||||
ch = this.buffer[++i];
|
||||
else if (ch === '%' &&
|
||||
hexDigits.includes(this.buffer[i + 1]) &&
|
||||
hexDigits.includes(this.buffer[i + 2])) {
|
||||
ch = this.buffer[(i += 3)];
|
||||
}
|
||||
else
|
||||
break;
|
||||
}
|
||||
return yield* this.pushToIndex(i, false);
|
||||
}
|
||||
}
|
||||
*pushNewline() {
|
||||
const ch = this.buffer[this.pos];
|
||||
if (ch === '\n')
|
||||
return yield* this.pushCount(1);
|
||||
else if (ch === '\r' && this.charAt(1) === '\n')
|
||||
return yield* this.pushCount(2);
|
||||
else
|
||||
return 0;
|
||||
}
|
||||
*pushSpaces(allowTabs) {
|
||||
let i = this.pos - 1;
|
||||
let ch;
|
||||
do {
|
||||
ch = this.buffer[++i];
|
||||
} while (ch === ' ' || (allowTabs && ch === '\t'));
|
||||
const n = i - this.pos;
|
||||
if (n > 0) {
|
||||
yield this.buffer.substr(this.pos, n);
|
||||
this.pos = i;
|
||||
}
|
||||
return n;
|
||||
}
|
||||
*pushUntil(test) {
|
||||
let i = this.pos;
|
||||
let ch = this.buffer[i];
|
||||
while (!test(ch))
|
||||
ch = this.buffer[++i];
|
||||
return yield* this.pushToIndex(i, false);
|
||||
}
|
||||
}
|
||||
|
||||
export { Lexer };
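To make the control characters listed in the class comment concrete, a sketch of raw lexer output (assuming the top-level `Lexer` export):

```js
import { Lexer } from 'yaml'

// The lexer only slices the input; structure is rebuilt later by the Parser.
Array.from(new Lexer().lex('- a\n- b\n'))
// [ '\x02',        // start of a document
//   '-', ' ',      // seq-item indicator and its trailing space
//   '\x1f', 'a',   // "next token is a scalar", then the scalar text itself
//   '\n',
//   '-', ' ', '\x1f', 'b', '\n' ]
```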
|
39
node_modules/yaml/browser/dist/parse/line-counter.js
generated
vendored
Normal file
|
@@ -0,0 +1,39 @@
|
|||
/**
|
||||
* Tracks newlines during parsing in order to provide an efficient API for
|
||||
* determining the one-indexed `{ line, col }` position for any offset
|
||||
* within the input.
|
||||
*/
|
||||
class LineCounter {
|
||||
constructor() {
|
||||
this.lineStarts = [];
|
||||
/**
|
||||
* Should be called in ascending order. Otherwise, call
|
||||
* `lineCounter.lineStarts.sort()` before calling `linePos()`.
|
||||
*/
|
||||
this.addNewLine = (offset) => this.lineStarts.push(offset);
|
||||
/**
|
||||
* Performs a binary search and returns the 1-indexed { line, col }
|
||||
* position of `offset`. If `line === 0`, `addNewLine` has never been
|
||||
* called or `offset` is before the first known newline.
|
||||
*/
|
||||
this.linePos = (offset) => {
|
||||
let low = 0;
|
||||
let high = this.lineStarts.length;
|
||||
while (low < high) {
|
||||
const mid = (low + high) >> 1; // Math.floor((low + high) / 2)
|
||||
if (this.lineStarts[mid] < offset)
|
||||
low = mid + 1;
|
||||
else
|
||||
high = mid;
|
||||
}
|
||||
if (this.lineStarts[low] === offset)
|
||||
return { line: low + 1, col: 1 };
|
||||
if (low === 0)
|
||||
return { line: 0, col: offset };
|
||||
const start = this.lineStarts[low - 1];
|
||||
return { line: low, col: offset - start + 1 };
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export { LineCounter };
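A sketch of wiring a `LineCounter` into the parser through its `onNewLine` callback (see the `Parser` constructor below), assuming both classes are exported from the package root:

```js
import { Parser, LineCounter } from 'yaml'

const lineCounter = new LineCounter()
const parser = new Parser(lineCounter.addNewLine)
Array.from(parser.parse('a: 1\nb: 2\n')) // line starts are recorded while parsing
lineCounter.linePos(5) // { line: 2, col: 1 } – offset 5 is where 'b' starts
```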
|
950
node_modules/yaml/browser/dist/parse/parser.js
generated
vendored
Normal file
|
@@ -0,0 +1,950 @@
|
|||
import { tokenType } from './cst.js';
|
||||
import { Lexer } from './lexer.js';
|
||||
|
||||
function includesToken(list, type) {
|
||||
for (let i = 0; i < list.length; ++i)
|
||||
if (list[i].type === type)
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
function findNonEmptyIndex(list) {
|
||||
for (let i = 0; i < list.length; ++i) {
|
||||
switch (list[i].type) {
|
||||
case 'space':
|
||||
case 'comment':
|
||||
case 'newline':
|
||||
break;
|
||||
default:
|
||||
return i;
|
||||
}
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
function isFlowToken(token) {
|
||||
switch (token?.type) {
|
||||
case 'alias':
|
||||
case 'scalar':
|
||||
case 'single-quoted-scalar':
|
||||
case 'double-quoted-scalar':
|
||||
case 'flow-collection':
|
||||
return true;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
function getPrevProps(parent) {
|
||||
switch (parent.type) {
|
||||
case 'document':
|
||||
return parent.start;
|
||||
case 'block-map': {
|
||||
const it = parent.items[parent.items.length - 1];
|
||||
return it.sep ?? it.start;
|
||||
}
|
||||
case 'block-seq':
|
||||
return parent.items[parent.items.length - 1].start;
|
||||
/* istanbul ignore next should not happen */
|
||||
default:
|
||||
return [];
|
||||
}
|
||||
}
|
||||
/** Note: May modify input array */
|
||||
function getFirstKeyStartProps(prev) {
|
||||
if (prev.length === 0)
|
||||
return [];
|
||||
let i = prev.length;
|
||||
loop: while (--i >= 0) {
|
||||
switch (prev[i].type) {
|
||||
case 'doc-start':
|
||||
case 'explicit-key-ind':
|
||||
case 'map-value-ind':
|
||||
case 'seq-item-ind':
|
||||
case 'newline':
|
||||
break loop;
|
||||
}
|
||||
}
|
||||
while (prev[++i]?.type === 'space') {
|
||||
/* loop */
|
||||
}
|
||||
return prev.splice(i, prev.length);
|
||||
}
|
||||
function fixFlowSeqItems(fc) {
|
||||
if (fc.start.type === 'flow-seq-start') {
|
||||
for (const it of fc.items) {
|
||||
if (it.sep &&
|
||||
!it.value &&
|
||||
!includesToken(it.start, 'explicit-key-ind') &&
|
||||
!includesToken(it.sep, 'map-value-ind')) {
|
||||
if (it.key)
|
||||
it.value = it.key;
|
||||
delete it.key;
|
||||
if (isFlowToken(it.value)) {
|
||||
if (it.value.end)
|
||||
Array.prototype.push.apply(it.value.end, it.sep);
|
||||
else
|
||||
it.value.end = it.sep;
|
||||
}
|
||||
else
|
||||
Array.prototype.push.apply(it.start, it.sep);
|
||||
delete it.sep;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* A YAML concrete syntax tree (CST) parser
|
||||
*
|
||||
* ```ts
|
||||
* const src: string = ...
|
||||
* for (const token of new Parser().parse(src)) {
|
||||
* // token: Token
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* To use the parser with a user-provided lexer:
|
||||
*
|
||||
* ```ts
|
||||
* function* parse(source: string, lexer: Lexer) {
|
||||
* const parser = new Parser()
|
||||
* for (const lexeme of lexer.lex(source))
|
||||
* yield* parser.next(lexeme)
|
||||
* yield* parser.end()
|
||||
* }
|
||||
*
|
||||
* const src: string = ...
|
||||
* const lexer = new Lexer()
|
||||
* for (const token of parse(src, lexer)) {
|
||||
* // token: Token
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
class Parser {
|
||||
/**
|
||||
* @param onNewLine - If defined, called separately with the start position of
|
||||
* each new line (in `parse()`, including the start of input).
|
||||
*/
|
||||
constructor(onNewLine) {
|
||||
/** If true, space and sequence indicators count as indentation */
|
||||
this.atNewLine = true;
|
||||
/** If true, next token is a scalar value */
|
||||
this.atScalar = false;
|
||||
/** Current indentation level */
|
||||
this.indent = 0;
|
||||
/** Current offset since the start of parsing */
|
||||
this.offset = 0;
|
||||
/** On the same line with a block map key */
|
||||
this.onKeyLine = false;
|
||||
/** Top indicates the node that's currently being built */
|
||||
this.stack = [];
|
||||
/** The source of the current token, set in parse() */
|
||||
this.source = '';
|
||||
/** The type of the current token, set in parse() */
|
||||
this.type = '';
|
||||
// Must be defined after `next()`
|
||||
this.lexer = new Lexer();
|
||||
this.onNewLine = onNewLine;
|
||||
}
|
||||
/**
|
||||
* Parse `source` as a YAML stream.
|
||||
* If `incomplete`, a part of the last line may be left as a buffer for the next call.
|
||||
*
|
||||
* Errors are not thrown, but yielded as `{ type: 'error', message }` tokens.
|
||||
*
|
||||
* @returns A generator of tokens representing each directive, document, and other structure.
|
||||
*/
|
||||
*parse(source, incomplete = false) {
|
||||
if (this.onNewLine && this.offset === 0)
|
||||
this.onNewLine(0);
|
||||
for (const lexeme of this.lexer.lex(source, incomplete))
|
||||
yield* this.next(lexeme);
|
||||
if (!incomplete)
|
||||
yield* this.end();
|
||||
}
|
||||
/**
|
||||
* Advance the parser by the `source` of one lexical token.
|
||||
*/
|
||||
*next(source) {
|
||||
this.source = source;
|
||||
if (this.atScalar) {
|
||||
this.atScalar = false;
|
||||
yield* this.step();
|
||||
this.offset += source.length;
|
||||
return;
|
||||
}
|
||||
const type = tokenType(source);
|
||||
if (!type) {
|
||||
const message = `Not a YAML token: ${source}`;
|
||||
yield* this.pop({ type: 'error', offset: this.offset, message, source });
|
||||
this.offset += source.length;
|
||||
}
|
||||
else if (type === 'scalar') {
|
||||
this.atNewLine = false;
|
||||
this.atScalar = true;
|
||||
this.type = 'scalar';
|
||||
}
|
||||
else {
|
||||
this.type = type;
|
||||
yield* this.step();
|
||||
switch (type) {
|
||||
case 'newline':
|
||||
this.atNewLine = true;
|
||||
this.indent = 0;
|
||||
if (this.onNewLine)
|
||||
this.onNewLine(this.offset + source.length);
|
||||
break;
|
||||
case 'space':
|
||||
if (this.atNewLine && source[0] === ' ')
|
||||
this.indent += source.length;
|
||||
break;
|
||||
case 'explicit-key-ind':
|
||||
case 'map-value-ind':
|
||||
case 'seq-item-ind':
|
||||
if (this.atNewLine)
|
||||
this.indent += source.length;
|
||||
break;
|
||||
case 'doc-mode':
|
||||
case 'flow-error-end':
|
||||
return;
|
||||
default:
|
||||
this.atNewLine = false;
|
||||
}
|
||||
this.offset += source.length;
|
||||
}
|
||||
}
|
||||
/** Call at end of input to push out any remaining constructions */
|
||||
*end() {
|
||||
while (this.stack.length > 0)
|
||||
yield* this.pop();
|
||||
}
|
||||
get sourceToken() {
|
||||
const st = {
|
||||
type: this.type,
|
||||
offset: this.offset,
|
||||
indent: this.indent,
|
||||
source: this.source
|
||||
};
|
||||
return st;
|
||||
}
|
||||
*step() {
|
||||
const top = this.peek(1);
|
||||
if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) {
|
||||
while (this.stack.length > 0)
|
||||
yield* this.pop();
|
||||
this.stack.push({
|
||||
type: 'doc-end',
|
||||
offset: this.offset,
|
||||
source: this.source
|
||||
});
|
||||
return;
|
||||
}
|
||||
if (!top)
|
||||
return yield* this.stream();
|
||||
switch (top.type) {
|
||||
case 'document':
|
||||
return yield* this.document(top);
|
||||
case 'alias':
|
||||
case 'scalar':
|
||||
case 'single-quoted-scalar':
|
||||
case 'double-quoted-scalar':
|
||||
return yield* this.scalar(top);
|
||||
case 'block-scalar':
|
||||
return yield* this.blockScalar(top);
|
||||
case 'block-map':
|
||||
return yield* this.blockMap(top);
|
||||
case 'block-seq':
|
||||
return yield* this.blockSequence(top);
|
||||
case 'flow-collection':
|
||||
return yield* this.flowCollection(top);
|
||||
case 'doc-end':
|
||||
return yield* this.documentEnd(top);
|
||||
}
|
||||
/* istanbul ignore next should not happen */
|
||||
yield* this.pop();
|
||||
}
|
||||
peek(n) {
|
||||
return this.stack[this.stack.length - n];
|
||||
}
|
||||
*pop(error) {
|
||||
const token = error ?? this.stack.pop();
|
||||
/* istanbul ignore if should not happen */
|
||||
if (!token) {
|
||||
const message = 'Tried to pop an empty stack';
|
||||
yield { type: 'error', offset: this.offset, source: '', message };
|
||||
}
|
||||
else if (this.stack.length === 0) {
|
||||
yield token;
|
||||
}
|
||||
else {
|
||||
const top = this.peek(1);
|
||||
if (token.type === 'block-scalar') {
|
||||
// Block scalars use their parent rather than header indent
|
||||
token.indent = 'indent' in top ? top.indent : 0;
|
||||
}
|
||||
else if (token.type === 'flow-collection' && top.type === 'document') {
|
||||
// Ignore all indent for top-level flow collections
|
||||
token.indent = 0;
|
||||
}
|
||||
if (token.type === 'flow-collection')
|
||||
fixFlowSeqItems(token);
|
||||
switch (top.type) {
|
||||
case 'document':
|
||||
top.value = token;
|
||||
break;
|
||||
case 'block-scalar':
|
||||
top.props.push(token); // error
|
||||
break;
|
||||
case 'block-map': {
|
||||
const it = top.items[top.items.length - 1];
|
||||
if (it.value) {
|
||||
top.items.push({ start: [], key: token, sep: [] });
|
||||
this.onKeyLine = true;
|
||||
return;
|
||||
}
|
||||
else if (it.sep) {
|
||||
it.value = token;
|
||||
}
|
||||
else {
|
||||
Object.assign(it, { key: token, sep: [] });
|
||||
this.onKeyLine = !includesToken(it.start, 'explicit-key-ind');
|
||||
return;
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'block-seq': {
|
||||
const it = top.items[top.items.length - 1];
|
||||
if (it.value)
|
||||
top.items.push({ start: [], value: token });
|
||||
else
|
||||
it.value = token;
|
||||
break;
|
||||
}
|
||||
case 'flow-collection': {
|
||||
const it = top.items[top.items.length - 1];
|
||||
if (!it || it.value)
|
||||
top.items.push({ start: [], key: token, sep: [] });
|
||||
else if (it.sep)
|
||||
it.value = token;
|
||||
else
|
||||
Object.assign(it, { key: token, sep: [] });
|
||||
return;
|
||||
}
|
||||
/* istanbul ignore next should not happen */
|
||||
default:
|
||||
yield* this.pop();
|
||||
yield* this.pop(token);
|
||||
}
|
||||
if ((top.type === 'document' ||
|
||||
top.type === 'block-map' ||
|
||||
top.type === 'block-seq') &&
|
||||
(token.type === 'block-map' || token.type === 'block-seq')) {
|
||||
const last = token.items[token.items.length - 1];
|
||||
if (last &&
|
||||
!last.sep &&
|
||||
!last.value &&
|
||||
last.start.length > 0 &&
|
||||
findNonEmptyIndex(last.start) === -1 &&
|
||||
(token.indent === 0 ||
|
||||
last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) {
|
||||
if (top.type === 'document')
|
||||
top.end = last.start;
|
||||
else
|
||||
top.items.push({ start: last.start });
|
||||
token.items.splice(-1, 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
*stream() {
|
||||
switch (this.type) {
|
||||
case 'directive-line':
|
||||
yield { type: 'directive', offset: this.offset, source: this.source };
|
||||
return;
|
||||
case 'byte-order-mark':
|
||||
case 'space':
|
||||
case 'comment':
|
||||
case 'newline':
|
||||
yield this.sourceToken;
|
||||
return;
|
||||
case 'doc-mode':
|
||||
case 'doc-start': {
|
||||
const doc = {
|
||||
type: 'document',
|
||||
offset: this.offset,
|
||||
start: []
|
||||
};
|
||||
if (this.type === 'doc-start')
|
||||
doc.start.push(this.sourceToken);
|
||||
this.stack.push(doc);
|
||||
return;
|
||||
}
|
||||
}
|
||||
yield {
|
||||
type: 'error',
|
||||
offset: this.offset,
|
||||
message: `Unexpected ${this.type} token in YAML stream`,
|
||||
source: this.source
|
||||
};
|
||||
}
|
||||
*document(doc) {
|
||||
if (doc.value)
|
||||
return yield* this.lineEnd(doc);
|
||||
switch (this.type) {
|
||||
case 'doc-start': {
|
||||
if (findNonEmptyIndex(doc.start) !== -1) {
|
||||
yield* this.pop();
|
||||
yield* this.step();
|
||||
}
|
||||
else
|
||||
doc.start.push(this.sourceToken);
|
||||
return;
|
||||
}
|
||||
case 'anchor':
|
||||
case 'tag':
|
||||
case 'space':
|
||||
case 'comment':
|
||||
case 'newline':
|
||||
doc.start.push(this.sourceToken);
|
||||
return;
|
||||
}
|
||||
const bv = this.startBlockValue(doc);
|
||||
if (bv)
|
||||
this.stack.push(bv);
|
||||
else {
|
||||
yield {
|
||||
type: 'error',
|
||||
offset: this.offset,
|
||||
message: `Unexpected ${this.type} token in YAML document`,
|
||||
source: this.source
|
||||
};
|
||||
}
|
||||
}
|
||||
*scalar(scalar) {
|
||||
if (this.type === 'map-value-ind') {
|
||||
const prev = getPrevProps(this.peek(2));
|
||||
const start = getFirstKeyStartProps(prev);
|
||||
let sep;
|
||||
if (scalar.end) {
|
||||
sep = scalar.end;
|
||||
sep.push(this.sourceToken);
|
||||
delete scalar.end;
|
||||
}
|
||||
else
|
||||
sep = [this.sourceToken];
|
||||
const map = {
|
||||
type: 'block-map',
|
||||
offset: scalar.offset,
|
||||
indent: scalar.indent,
|
||||
items: [{ start, key: scalar, sep }]
|
||||
};
|
||||
this.onKeyLine = true;
|
||||
this.stack[this.stack.length - 1] = map;
|
||||
}
|
||||
else
|
||||
yield* this.lineEnd(scalar);
|
||||
}
|
||||
*blockScalar(scalar) {
|
||||
switch (this.type) {
|
||||
case 'space':
|
||||
case 'comment':
|
||||
case 'newline':
|
||||
scalar.props.push(this.sourceToken);
|
||||
return;
|
||||
case 'scalar':
|
||||
scalar.source = this.source;
|
||||
// block-scalar source includes trailing newline
|
||||
this.atNewLine = true;
|
||||
this.indent = 0;
|
||||
if (this.onNewLine) {
|
||||
let nl = this.source.indexOf('\n') + 1;
|
||||
while (nl !== 0) {
|
||||
this.onNewLine(this.offset + nl);
|
||||
nl = this.source.indexOf('\n', nl) + 1;
|
||||
}
|
||||
}
|
||||
yield* this.pop();
|
||||
break;
|
||||
/* istanbul ignore next should not happen */
|
||||
default:
|
||||
yield* this.pop();
|
||||
yield* this.step();
|
||||
}
|
||||
}
|
||||
*blockMap(map) {
|
||||
const it = map.items[map.items.length - 1];
|
||||
// it.sep is true-ish if pair already has key or : separator
|
||||
switch (this.type) {
|
||||
case 'newline':
|
||||
this.onKeyLine = false;
|
||||
if (it.value) {
|
||||
const end = 'end' in it.value ? it.value.end : undefined;
|
||||
const last = Array.isArray(end) ? end[end.length - 1] : undefined;
|
||||
if (last?.type === 'comment')
|
||||
end?.push(this.sourceToken);
|
||||
else
|
||||
map.items.push({ start: [this.sourceToken] });
|
||||
}
|
||||
else if (it.sep) {
|
||||
it.sep.push(this.sourceToken);
|
||||
}
|
||||
else {
|
||||
it.start.push(this.sourceToken);
|
||||
}
|
||||
return;
|
||||
case 'space':
|
||||
case 'comment':
|
||||
if (it.value) {
|
||||
map.items.push({ start: [this.sourceToken] });
|
||||
}
|
||||
else if (it.sep) {
|
||||
it.sep.push(this.sourceToken);
|
||||
}
|
||||
else {
|
||||
if (this.atIndentedComment(it.start, map.indent)) {
|
||||
const prev = map.items[map.items.length - 2];
|
||||
const end = prev?.value?.end;
|
||||
if (Array.isArray(end)) {
|
||||
Array.prototype.push.apply(end, it.start);
|
||||
end.push(this.sourceToken);
|
||||
map.items.pop();
|
||||
return;
|
||||
}
|
||||
}
|
||||
it.start.push(this.sourceToken);
|
||||
}
|
||||
return;
|
||||
}
|
||||
if (this.indent >= map.indent) {
|
||||
const atNextItem = !this.onKeyLine && this.indent === map.indent && it.sep;
|
||||
// For empty nodes, assign newline-separated not indented empty tokens to following node
|
||||
let start = [];
|
||||
if (atNextItem && it.sep && !it.value) {
|
||||
const nl = [];
|
||||
for (let i = 0; i < it.sep.length; ++i) {
|
||||
const st = it.sep[i];
|
||||
switch (st.type) {
|
||||
case 'newline':
|
||||
nl.push(i);
|
||||
break;
|
||||
case 'space':
|
||||
break;
|
||||
case 'comment':
|
||||
if (st.indent > map.indent)
|
||||
nl.length = 0;
|
||||
break;
|
||||
default:
|
||||
nl.length = 0;
|
||||
}
|
||||
}
|
||||
if (nl.length >= 2)
|
||||
start = it.sep.splice(nl[1]);
|
||||
}
|
||||
switch (this.type) {
|
||||
case 'anchor':
|
||||
case 'tag':
|
||||
if (atNextItem || it.value) {
|
||||
start.push(this.sourceToken);
|
||||
map.items.push({ start });
|
||||
this.onKeyLine = true;
|
||||
}
|
||||
else if (it.sep) {
|
||||
it.sep.push(this.sourceToken);
|
||||
}
|
||||
else {
|
||||
it.start.push(this.sourceToken);
|
||||
}
|
||||
return;
|
||||
case 'explicit-key-ind':
|
||||
if (!it.sep && !includesToken(it.start, 'explicit-key-ind')) {
|
||||
it.start.push(this.sourceToken);
|
||||
}
|
||||
else if (atNextItem || it.value) {
|
||||
start.push(this.sourceToken);
|
||||
map.items.push({ start });
|
||||
}
|
||||
else {
|
||||
this.stack.push({
|
||||
type: 'block-map',
|
||||
offset: this.offset,
|
||||
indent: this.indent,
|
||||
items: [{ start: [this.sourceToken] }]
|
||||
});
|
||||
}
|
||||
this.onKeyLine = true;
|
||||
return;
|
||||
case 'map-value-ind':
|
||||
if (includesToken(it.start, 'explicit-key-ind')) {
|
||||
if (!it.sep) {
|
||||
if (includesToken(it.start, 'newline')) {
|
||||
Object.assign(it, { key: null, sep: [this.sourceToken] });
|
||||
}
|
||||
else {
|
||||
const start = getFirstKeyStartProps(it.start);
|
||||
this.stack.push({
|
||||
type: 'block-map',
|
||||
offset: this.offset,
|
||||
indent: this.indent,
|
||||
items: [{ start, key: null, sep: [this.sourceToken] }]
|
||||
});
|
||||
}
|
||||
}
|
||||
else if (it.value) {
|
||||
map.items.push({ start: [], key: null, sep: [this.sourceToken] });
|
||||
}
|
||||
else if (includesToken(it.sep, 'map-value-ind')) {
|
||||
this.stack.push({
|
||||
type: 'block-map',
|
||||
offset: this.offset,
|
||||
indent: this.indent,
|
||||
items: [{ start, key: null, sep: [this.sourceToken] }]
|
||||
});
|
||||
}
|
||||
else if (isFlowToken(it.key) &&
|
||||
!includesToken(it.sep, 'newline')) {
|
||||
const start = getFirstKeyStartProps(it.start);
|
||||
const key = it.key;
|
||||
const sep = it.sep;
|
||||
sep.push(this.sourceToken);
|
||||
// @ts-expect-error type guard is wrong here
|
||||
delete it.key, delete it.sep;
|
||||
this.stack.push({
|
||||
type: 'block-map',
|
||||
offset: this.offset,
|
||||
indent: this.indent,
|
||||
items: [{ start, key, sep }]
|
||||
});
|
||||
}
|
||||
else if (start.length > 0) {
|
||||
// Not actually at next item
|
||||
it.sep = it.sep.concat(start, this.sourceToken);
|
||||
}
|
||||
else {
|
||||
it.sep.push(this.sourceToken);
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (!it.sep) {
|
||||
Object.assign(it, { key: null, sep: [this.sourceToken] });
|
||||
}
|
||||
else if (it.value || atNextItem) {
|
||||
map.items.push({ start, key: null, sep: [this.sourceToken] });
|
||||
}
|
||||
else if (includesToken(it.sep, 'map-value-ind')) {
|
||||
this.stack.push({
|
||||
type: 'block-map',
|
||||
offset: this.offset,
|
||||
indent: this.indent,
|
||||
items: [{ start: [], key: null, sep: [this.sourceToken] }]
|
||||
});
|
||||
}
|
||||
else {
|
||||
it.sep.push(this.sourceToken);
|
||||
}
|
||||
}
|
||||
this.onKeyLine = true;
|
||||
return;
|
||||
case 'alias':
|
||||
case 'scalar':
|
||||
case 'single-quoted-scalar':
|
||||
case 'double-quoted-scalar': {
|
||||
const fs = this.flowScalar(this.type);
|
||||
if (atNextItem || it.value) {
|
||||
map.items.push({ start, key: fs, sep: [] });
|
||||
this.onKeyLine = true;
|
||||
}
|
||||
else if (it.sep) {
|
||||
this.stack.push(fs);
|
||||
}
|
||||
else {
|
||||
Object.assign(it, { key: fs, sep: [] });
|
||||
this.onKeyLine = true;
|
||||
}
|
||||
return;
|
||||
}
|
||||
default: {
|
||||
const bv = this.startBlockValue(map);
|
||||
if (bv) {
|
||||
if (atNextItem &&
|
||||
bv.type !== 'block-seq' &&
|
||||
includesToken(it.start, 'explicit-key-ind')) {
|
||||
map.items.push({ start });
|
||||
}
|
||||
this.stack.push(bv);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
yield* this.pop();
|
||||
yield* this.step();
|
||||
}
|
||||
*blockSequence(seq) {
|
||||
const it = seq.items[seq.items.length - 1];
|
||||
switch (this.type) {
|
||||
case 'newline':
|
||||
if (it.value) {
|
||||
const end = 'end' in it.value ? it.value.end : undefined;
|
||||
const last = Array.isArray(end) ? end[end.length - 1] : undefined;
|
||||
if (last?.type === 'comment')
|
||||
end?.push(this.sourceToken);
|
||||
else
|
||||
seq.items.push({ start: [this.sourceToken] });
|
||||
}
|
||||
else
|
||||
it.start.push(this.sourceToken);
|
||||
return;
|
||||
case 'space':
|
||||
case 'comment':
|
||||
if (it.value)
|
||||
seq.items.push({ start: [this.sourceToken] });
|
||||
else {
|
||||
if (this.atIndentedComment(it.start, seq.indent)) {
|
||||
const prev = seq.items[seq.items.length - 2];
|
||||
const end = prev?.value?.end;
|
||||
if (Array.isArray(end)) {
|
||||
Array.prototype.push.apply(end, it.start);
|
||||
end.push(this.sourceToken);
|
||||
seq.items.pop();
|
||||
return;
|
||||
}
|
||||
}
|
||||
it.start.push(this.sourceToken);
|
||||
}
|
||||
return;
|
||||
case 'anchor':
|
||||
case 'tag':
|
||||
if (it.value || this.indent <= seq.indent)
|
||||
break;
|
||||
it.start.push(this.sourceToken);
|
||||
return;
|
||||
case 'seq-item-ind':
|
||||
if (this.indent !== seq.indent)
|
||||
break;
|
||||
if (it.value || includesToken(it.start, 'seq-item-ind'))
|
||||
seq.items.push({ start: [this.sourceToken] });
|
||||
else
|
||||
it.start.push(this.sourceToken);
|
||||
return;
|
||||
}
|
||||
if (this.indent > seq.indent) {
|
||||
const bv = this.startBlockValue(seq);
|
||||
if (bv) {
|
||||
this.stack.push(bv);
|
||||
return;
|
||||
}
|
||||
}
|
||||
yield* this.pop();
|
||||
yield* this.step();
|
||||
}
|
||||
*flowCollection(fc) {
|
||||
const it = fc.items[fc.items.length - 1];
|
||||
if (this.type === 'flow-error-end') {
|
||||
let top;
|
||||
do {
|
||||
yield* this.pop();
|
||||
top = this.peek(1);
|
||||
} while (top && top.type === 'flow-collection');
|
||||
}
|
||||
else if (fc.end.length === 0) {
|
||||
switch (this.type) {
|
||||
case 'comma':
|
||||
case 'explicit-key-ind':
|
||||
if (!it || it.sep)
|
||||
fc.items.push({ start: [this.sourceToken] });
|
||||
else
|
||||
it.start.push(this.sourceToken);
|
||||
return;
|
||||
case 'map-value-ind':
|
||||
if (!it || it.value)
|
||||
fc.items.push({ start: [], key: null, sep: [this.sourceToken] });
|
||||
else if (it.sep)
|
||||
it.sep.push(this.sourceToken);
|
||||
else
|
||||
Object.assign(it, { key: null, sep: [this.sourceToken] });
|
||||
return;
|
||||
case 'space':
|
||||
case 'comment':
|
||||
case 'newline':
|
||||
case 'anchor':
|
||||
case 'tag':
|
||||
if (!it || it.value)
|
||||
fc.items.push({ start: [this.sourceToken] });
|
||||
else if (it.sep)
|
||||
it.sep.push(this.sourceToken);
|
||||
else
|
||||
it.start.push(this.sourceToken);
|
||||
return;
|
||||
case 'alias':
|
||||
case 'scalar':
|
||||
case 'single-quoted-scalar':
|
||||
case 'double-quoted-scalar': {
|
||||
const fs = this.flowScalar(this.type);
|
||||
if (!it || it.value)
|
||||
fc.items.push({ start: [], key: fs, sep: [] });
|
||||
else if (it.sep)
|
||||
this.stack.push(fs);
|
||||
else
|
||||
Object.assign(it, { key: fs, sep: [] });
|
||||
return;
|
||||
}
|
||||
case 'flow-map-end':
|
||||
case 'flow-seq-end':
|
||||
fc.end.push(this.sourceToken);
|
||||
return;
|
||||
}
|
||||
const bv = this.startBlockValue(fc);
|
||||
/* istanbul ignore else should not happen */
|
||||
if (bv)
|
||||
this.stack.push(bv);
|
||||
else {
|
||||
yield* this.pop();
|
||||
yield* this.step();
|
||||
}
|
||||
}
|
||||
else {
|
||||
const parent = this.peek(2);
|
||||
if (parent.type === 'block-map' &&
|
||||
((this.type === 'map-value-ind' && parent.indent === fc.indent) ||
|
||||
(this.type === 'newline' &&
|
||||
!parent.items[parent.items.length - 1].sep))) {
|
||||
yield* this.pop();
|
||||
yield* this.step();
|
||||
}
|
||||
else if (this.type === 'map-value-ind' &&
|
||||
parent.type !== 'flow-collection') {
|
||||
const prev = getPrevProps(parent);
|
||||
const start = getFirstKeyStartProps(prev);
|
||||
fixFlowSeqItems(fc);
|
||||
const sep = fc.end.splice(1, fc.end.length);
|
||||
sep.push(this.sourceToken);
|
||||
const map = {
|
||||
type: 'block-map',
|
||||
offset: fc.offset,
|
||||
indent: fc.indent,
|
||||
items: [{ start, key: fc, sep }]
|
||||
};
|
||||
this.onKeyLine = true;
|
||||
this.stack[this.stack.length - 1] = map;
|
||||
}
|
||||
else {
|
||||
yield* this.lineEnd(fc);
|
||||
}
|
||||
}
|
||||
}
|
||||
flowScalar(type) {
|
||||
if (this.onNewLine) {
|
||||
let nl = this.source.indexOf('\n') + 1;
|
||||
while (nl !== 0) {
|
||||
this.onNewLine(this.offset + nl);
|
||||
nl = this.source.indexOf('\n', nl) + 1;
|
||||
}
|
||||
}
|
||||
return {
|
||||
type,
|
||||
offset: this.offset,
|
||||
indent: this.indent,
|
||||
source: this.source
|
||||
};
|
||||
}
|
||||
startBlockValue(parent) {
|
||||
switch (this.type) {
|
||||
case 'alias':
|
||||
case 'scalar':
|
||||
case 'single-quoted-scalar':
|
||||
case 'double-quoted-scalar':
|
||||
return this.flowScalar(this.type);
|
||||
case 'block-scalar-header':
|
||||
return {
|
||||
type: 'block-scalar',
|
||||
offset: this.offset,
|
||||
indent: this.indent,
|
||||
props: [this.sourceToken],
|
||||
source: ''
|
||||
};
|
||||
case 'flow-map-start':
|
||||
case 'flow-seq-start':
|
||||
return {
|
||||
type: 'flow-collection',
|
||||
offset: this.offset,
|
||||
indent: this.indent,
|
||||
start: this.sourceToken,
|
||||
items: [],
|
||||
end: []
|
||||
};
|
||||
case 'seq-item-ind':
|
||||
return {
|
||||
type: 'block-seq',
|
||||
offset: this.offset,
|
||||
indent: this.indent,
|
||||
items: [{ start: [this.sourceToken] }]
|
||||
};
|
||||
case 'explicit-key-ind': {
|
||||
this.onKeyLine = true;
|
||||
const prev = getPrevProps(parent);
|
||||
const start = getFirstKeyStartProps(prev);
|
||||
start.push(this.sourceToken);
|
||||
return {
|
||||
type: 'block-map',
|
||||
offset: this.offset,
|
||||
indent: this.indent,
|
||||
items: [{ start }]
|
||||
};
|
||||
}
|
||||
case 'map-value-ind': {
|
||||
this.onKeyLine = true;
|
||||
const prev = getPrevProps(parent);
|
||||
const start = getFirstKeyStartProps(prev);
|
||||
return {
|
||||
type: 'block-map',
|
||||
offset: this.offset,
|
||||
indent: this.indent,
|
||||
items: [{ start, key: null, sep: [this.sourceToken] }]
|
||||
};
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
atIndentedComment(start, indent) {
|
||||
if (this.type !== 'comment')
|
||||
return false;
|
||||
if (this.indent <= indent)
|
||||
return false;
|
||||
return start.every(st => st.type === 'newline' || st.type === 'space');
|
||||
}
|
||||
*documentEnd(docEnd) {
|
||||
if (this.type !== 'doc-mode') {
|
||||
if (docEnd.end)
|
||||
docEnd.end.push(this.sourceToken);
|
||||
else
|
||||
docEnd.end = [this.sourceToken];
|
||||
if (this.type === 'newline')
|
||||
yield* this.pop();
|
||||
}
|
||||
}
|
||||
*lineEnd(token) {
|
||||
switch (this.type) {
|
||||
case 'comma':
|
||||
case 'doc-start':
|
||||
case 'doc-end':
|
||||
case 'flow-seq-end':
|
||||
case 'flow-map-end':
|
||||
case 'map-value-ind':
|
||||
yield* this.pop();
|
||||
yield* this.step();
|
||||
break;
|
||||
case 'newline':
|
||||
this.onKeyLine = false;
|
||||
// fallthrough
|
||||
case 'space':
|
||||
case 'comment':
|
||||
default:
|
||||
// all other values are errors
|
||||
if (token.end)
|
||||
token.end.push(this.sourceToken);
|
||||
else
|
||||
token.end = [this.sourceToken];
|
||||
if (this.type === 'newline')
|
||||
yield* this.pop();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export { Parser };
|
99
node_modules/yaml/browser/dist/public-api.js
generated
vendored
Normal file
99
node_modules/yaml/browser/dist/public-api.js
generated
vendored
Normal file
|
@ -0,0 +1,99 @@
|
|||
import { Composer } from './compose/composer.js';
|
||||
import { Document } from './doc/Document.js';
|
||||
import { prettifyError, YAMLParseError } from './errors.js';
|
||||
import { warn } from './log.js';
|
||||
import { LineCounter } from './parse/line-counter.js';
|
||||
import { Parser } from './parse/parser.js';
|
||||
|
||||
function parseOptions(options) {
|
||||
const prettyErrors = options.prettyErrors !== false;
|
||||
const lineCounter = options.lineCounter || (prettyErrors && new LineCounter()) || null;
|
||||
return { lineCounter, prettyErrors };
|
||||
}
|
||||
/**
|
||||
* Parse the input as a stream of YAML documents.
|
||||
*
|
||||
* Documents should be separated from each other by `...` or `---` marker lines.
|
||||
*
|
||||
* @returns If an empty `docs` array is returned, it will be of type
|
||||
* EmptyStream and contain additional stream information. In
|
||||
* TypeScript, you should use `'empty' in docs` as a type guard for it.
|
||||
*/
|
||||
function parseAllDocuments(source, options = {}) {
|
||||
const { lineCounter, prettyErrors } = parseOptions(options);
|
||||
const parser = new Parser(lineCounter?.addNewLine);
|
||||
const composer = new Composer(options);
|
||||
const docs = Array.from(composer.compose(parser.parse(source)));
|
||||
if (prettyErrors && lineCounter)
|
||||
for (const doc of docs) {
|
||||
doc.errors.forEach(prettifyError(source, lineCounter));
|
||||
doc.warnings.forEach(prettifyError(source, lineCounter));
|
||||
}
|
||||
if (docs.length > 0)
|
||||
return docs;
|
||||
return Object.assign([], { empty: true }, composer.streamInfo());
|
||||
}
|
||||
/** Parse an input string into a single YAML.Document */
|
||||
function parseDocument(source, options = {}) {
|
||||
const { lineCounter, prettyErrors } = parseOptions(options);
|
||||
const parser = new Parser(lineCounter?.addNewLine);
|
||||
const composer = new Composer(options);
|
||||
// `doc` is always set by compose.end(true) at the very latest
|
||||
let doc = null;
|
||||
for (const _doc of composer.compose(parser.parse(source), true, source.length)) {
|
||||
if (!doc)
|
||||
doc = _doc;
|
||||
else if (doc.options.logLevel !== 'silent') {
|
||||
doc.errors.push(new YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()'));
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (prettyErrors && lineCounter) {
|
||||
doc.errors.forEach(prettifyError(source, lineCounter));
|
||||
doc.warnings.forEach(prettifyError(source, lineCounter));
|
||||
}
|
||||
return doc;
|
||||
}
|
||||
function parse(src, reviver, options) {
|
||||
let _reviver = undefined;
|
||||
if (typeof reviver === 'function') {
|
||||
_reviver = reviver;
|
||||
}
|
||||
else if (options === undefined && reviver && typeof reviver === 'object') {
|
||||
options = reviver;
|
||||
}
|
||||
const doc = parseDocument(src, options);
|
||||
if (!doc)
|
||||
return null;
|
||||
doc.warnings.forEach(warning => warn(doc.options.logLevel, warning));
|
||||
if (doc.errors.length > 0) {
|
||||
if (doc.options.logLevel !== 'silent')
|
||||
throw doc.errors[0];
|
||||
else
|
||||
doc.errors = [];
|
||||
}
|
||||
return doc.toJS(Object.assign({ reviver: _reviver }, options));
|
||||
}
|
||||
function stringify(value, replacer, options) {
|
||||
let _replacer = null;
|
||||
if (typeof replacer === 'function' || Array.isArray(replacer)) {
|
||||
_replacer = replacer;
|
||||
}
|
||||
else if (options === undefined && replacer) {
|
||||
options = replacer;
|
||||
}
|
||||
if (typeof options === 'string')
|
||||
options = options.length;
|
||||
if (typeof options === 'number') {
|
||||
const indent = Math.round(options);
|
||||
options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent };
|
||||
}
|
||||
if (value === undefined) {
|
||||
const { keepUndefined } = options ?? replacer ?? {};
|
||||
if (!keepUndefined)
|
||||
return undefined;
|
||||
}
|
||||
return new Document(value, _replacer, options).toString(options);
|
||||
}
|
||||
|
||||
export { parse, parseAllDocuments, parseDocument, stringify };
|
38
node_modules/yaml/browser/dist/schema/Schema.js
generated
vendored
Normal file
38
node_modules/yaml/browser/dist/schema/Schema.js
generated
vendored
Normal file
|
@ -0,0 +1,38 @@
|
|||
import { MAP, SCALAR, SEQ } from '../nodes/Node.js';
|
||||
import { map } from './common/map.js';
|
||||
import { seq } from './common/seq.js';
|
||||
import { string } from './common/string.js';
|
||||
import { getTags, coreKnownTags } from './tags.js';
|
||||
|
||||
const sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0;
|
||||
class Schema {
|
||||
constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) {
|
||||
this.compat = Array.isArray(compat)
|
||||
? getTags(compat, 'compat')
|
||||
: compat
|
||||
? getTags(null, compat)
|
||||
: null;
|
||||
this.merge = !!merge;
|
||||
this.name = (typeof schema === 'string' && schema) || 'core';
|
||||
this.knownTags = resolveKnownTags ? coreKnownTags : {};
|
||||
this.tags = getTags(customTags, this.name);
|
||||
this.toStringOptions = toStringDefaults ?? null;
|
||||
Object.defineProperty(this, MAP, { value: map });
|
||||
Object.defineProperty(this, SCALAR, { value: string });
|
||||
Object.defineProperty(this, SEQ, { value: seq });
|
||||
// Used by createMap()
|
||||
this.sortMapEntries =
|
||||
typeof sortMapEntries === 'function'
|
||||
? sortMapEntries
|
||||
: sortMapEntries === true
|
||||
? sortMapEntriesByKey
|
||||
: null;
|
||||
}
|
||||
clone() {
|
||||
const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this));
|
||||
copy.tags = this.tags.slice();
|
||||
return copy;
|
||||
}
|
||||
}
|
||||
|
||||
export { Schema };
|
42
node_modules/yaml/browser/dist/schema/common/map.js
generated
vendored
Normal file
42
node_modules/yaml/browser/dist/schema/common/map.js
generated
vendored
Normal file
|
@ -0,0 +1,42 @@
|
|||
import { isMap } from '../../nodes/Node.js';
|
||||
import { createPair } from '../../nodes/Pair.js';
|
||||
import { YAMLMap } from '../../nodes/YAMLMap.js';
|
||||
|
||||
function createMap(schema, obj, ctx) {
|
||||
const { keepUndefined, replacer } = ctx;
|
||||
const map = new YAMLMap(schema);
|
||||
const add = (key, value) => {
|
||||
if (typeof replacer === 'function')
|
||||
value = replacer.call(obj, key, value);
|
||||
else if (Array.isArray(replacer) && !replacer.includes(key))
|
||||
return;
|
||||
if (value !== undefined || keepUndefined)
|
||||
map.items.push(createPair(key, value, ctx));
|
||||
};
|
||||
if (obj instanceof Map) {
|
||||
for (const [key, value] of obj)
|
||||
add(key, value);
|
||||
}
|
||||
else if (obj && typeof obj === 'object') {
|
||||
for (const key of Object.keys(obj))
|
||||
add(key, obj[key]);
|
||||
}
|
||||
if (typeof schema.sortMapEntries === 'function') {
|
||||
map.items.sort(schema.sortMapEntries);
|
||||
}
|
||||
return map;
|
||||
}
|
||||
const map = {
|
||||
collection: 'map',
|
||||
createNode: createMap,
|
||||
default: true,
|
||||
nodeClass: YAMLMap,
|
||||
tag: 'tag:yaml.org,2002:map',
|
||||
resolve(map, onError) {
|
||||
if (!isMap(map))
|
||||
onError('Expected a mapping for this tag');
|
||||
return map;
|
||||
}
|
||||
};
|
||||
|
||||
export { map };
|
15
node_modules/yaml/browser/dist/schema/common/null.js
generated
vendored
Normal file
15
node_modules/yaml/browser/dist/schema/common/null.js
generated
vendored
Normal file
|
@ -0,0 +1,15 @@
|
|||
import { Scalar } from '../../nodes/Scalar.js';
|
||||
|
||||
const nullTag = {
|
||||
identify: value => value == null,
|
||||
createNode: () => new Scalar(null),
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:null',
|
||||
test: /^(?:~|[Nn]ull|NULL)?$/,
|
||||
resolve: () => new Scalar(null),
|
||||
stringify: ({ source }, ctx) => typeof source === 'string' && nullTag.test.test(source)
|
||||
? source
|
||||
: ctx.options.nullStr
|
||||
};
|
||||
|
||||
export { nullTag };
|
33
node_modules/yaml/browser/dist/schema/common/seq.js
generated
vendored
Normal file
33
node_modules/yaml/browser/dist/schema/common/seq.js
generated
vendored
Normal file
|
@ -0,0 +1,33 @@
|
|||
import { createNode } from '../../doc/createNode.js';
|
||||
import { isSeq } from '../../nodes/Node.js';
|
||||
import { YAMLSeq } from '../../nodes/YAMLSeq.js';
|
||||
|
||||
function createSeq(schema, obj, ctx) {
|
||||
const { replacer } = ctx;
|
||||
const seq = new YAMLSeq(schema);
|
||||
if (obj && Symbol.iterator in Object(obj)) {
|
||||
let i = 0;
|
||||
for (let it of obj) {
|
||||
if (typeof replacer === 'function') {
|
||||
const key = obj instanceof Set ? it : String(i++);
|
||||
it = replacer.call(obj, key, it);
|
||||
}
|
||||
seq.items.push(createNode(it, undefined, ctx));
|
||||
}
|
||||
}
|
||||
return seq;
|
||||
}
|
||||
const seq = {
|
||||
collection: 'seq',
|
||||
createNode: createSeq,
|
||||
default: true,
|
||||
nodeClass: YAMLSeq,
|
||||
tag: 'tag:yaml.org,2002:seq',
|
||||
resolve(seq, onError) {
|
||||
if (!isSeq(seq))
|
||||
onError('Expected a sequence for this tag');
|
||||
return seq;
|
||||
}
|
||||
};
|
||||
|
||||
export { seq };
|
14
node_modules/yaml/browser/dist/schema/common/string.js
generated
vendored
Normal file
14
node_modules/yaml/browser/dist/schema/common/string.js
generated
vendored
Normal file
|
@ -0,0 +1,14 @@
|
|||
import { stringifyString } from '../../stringify/stringifyString.js';
|
||||
|
||||
const string = {
|
||||
identify: value => typeof value === 'string',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:str',
|
||||
resolve: str => str,
|
||||
stringify(item, ctx, onComment, onChompKeep) {
|
||||
ctx = Object.assign({ actualString: true }, ctx);
|
||||
return stringifyString(item, ctx, onComment, onChompKeep);
|
||||
}
|
||||
};
|
||||
|
||||
export { string };
|
19
node_modules/yaml/browser/dist/schema/core/bool.js
generated
vendored
Normal file
19
node_modules/yaml/browser/dist/schema/core/bool.js
generated
vendored
Normal file
|
@ -0,0 +1,19 @@
|
|||
import { Scalar } from '../../nodes/Scalar.js';
|
||||
|
||||
const boolTag = {
|
||||
identify: value => typeof value === 'boolean',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:bool',
|
||||
test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/,
|
||||
resolve: str => new Scalar(str[0] === 't' || str[0] === 'T'),
|
||||
stringify({ source, value }, ctx) {
|
||||
if (source && boolTag.test.test(source)) {
|
||||
const sv = source[0] === 't' || source[0] === 'T';
|
||||
if (value === sv)
|
||||
return source;
|
||||
}
|
||||
return value ? ctx.options.trueStr : ctx.options.falseStr;
|
||||
}
|
||||
};
|
||||
|
||||
export { boolTag };
|
43
node_modules/yaml/browser/dist/schema/core/float.js
generated
vendored
Normal file
43
node_modules/yaml/browser/dist/schema/core/float.js
generated
vendored
Normal file
|
@ -0,0 +1,43 @@
|
|||
import { Scalar } from '../../nodes/Scalar.js';
|
||||
import { stringifyNumber } from '../../stringify/stringifyNumber.js';
|
||||
|
||||
const floatNaN = {
|
||||
identify: value => typeof value === 'number',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:float',
|
||||
test: /^(?:[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN))$/,
|
||||
resolve: str => str.slice(-3).toLowerCase() === 'nan'
|
||||
? NaN
|
||||
: str[0] === '-'
|
||||
? Number.NEGATIVE_INFINITY
|
||||
: Number.POSITIVE_INFINITY,
|
||||
stringify: stringifyNumber
|
||||
};
|
||||
const floatExp = {
|
||||
identify: value => typeof value === 'number',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:float',
|
||||
format: 'EXP',
|
||||
test: /^[-+]?(?:\.[0-9]+|[0-9]+(?:\.[0-9]*)?)[eE][-+]?[0-9]+$/,
|
||||
resolve: str => parseFloat(str),
|
||||
stringify(node) {
|
||||
const num = Number(node.value);
|
||||
return isFinite(num) ? num.toExponential() : stringifyNumber(node);
|
||||
}
|
||||
};
|
||||
const float = {
|
||||
identify: value => typeof value === 'number',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:float',
|
||||
test: /^[-+]?(?:\.[0-9]+|[0-9]+\.[0-9]*)$/,
|
||||
resolve(str) {
|
||||
const node = new Scalar(parseFloat(str));
|
||||
const dot = str.indexOf('.');
|
||||
if (dot !== -1 && str[str.length - 1] === '0')
|
||||
node.minFractionDigits = str.length - dot - 1;
|
||||
return node;
|
||||
},
|
||||
stringify: stringifyNumber
|
||||
};
|
||||
|
||||
export { float, floatExp, floatNaN };
|
38
node_modules/yaml/browser/dist/schema/core/int.js
generated
vendored
Normal file
38
node_modules/yaml/browser/dist/schema/core/int.js
generated
vendored
Normal file
|
@ -0,0 +1,38 @@
|
|||
import { stringifyNumber } from '../../stringify/stringifyNumber.js';
|
||||
|
||||
const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);
|
||||
const intResolve = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? BigInt(str) : parseInt(str.substring(offset), radix));
|
||||
function intStringify(node, radix, prefix) {
|
||||
const { value } = node;
|
||||
if (intIdentify(value) && value >= 0)
|
||||
return prefix + value.toString(radix);
|
||||
return stringifyNumber(node);
|
||||
}
|
||||
const intOct = {
|
||||
identify: value => intIdentify(value) && value >= 0,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
format: 'OCT',
|
||||
test: /^0o[0-7]+$/,
|
||||
resolve: (str, _onError, opt) => intResolve(str, 2, 8, opt),
|
||||
stringify: node => intStringify(node, 8, '0o')
|
||||
};
|
||||
const int = {
|
||||
identify: intIdentify,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
test: /^[-+]?[0-9]+$/,
|
||||
resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),
|
||||
stringify: stringifyNumber
|
||||
};
|
||||
const intHex = {
|
||||
identify: value => intIdentify(value) && value >= 0,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
format: 'HEX',
|
||||
test: /^0x[0-9a-fA-F]+$/,
|
||||
resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),
|
||||
stringify: node => intStringify(node, 16, '0x')
|
||||
};
|
||||
|
||||
export { int, intHex, intOct };
|
23
node_modules/yaml/browser/dist/schema/core/schema.js
generated
vendored
Normal file
23
node_modules/yaml/browser/dist/schema/core/schema.js
generated
vendored
Normal file
|
@ -0,0 +1,23 @@
|
|||
import { map } from '../common/map.js';
|
||||
import { nullTag } from '../common/null.js';
|
||||
import { seq } from '../common/seq.js';
|
||||
import { string } from '../common/string.js';
|
||||
import { boolTag } from './bool.js';
|
||||
import { floatNaN, floatExp, float } from './float.js';
|
||||
import { intOct, int, intHex } from './int.js';
|
||||
|
||||
const schema = [
|
||||
map,
|
||||
seq,
|
||||
string,
|
||||
nullTag,
|
||||
boolTag,
|
||||
intOct,
|
||||
int,
|
||||
intHex,
|
||||
floatNaN,
|
||||
floatExp,
|
||||
float
|
||||
];
|
||||
|
||||
export { schema };
|
62
node_modules/yaml/browser/dist/schema/json/schema.js
generated
vendored
Normal file
62
node_modules/yaml/browser/dist/schema/json/schema.js
generated
vendored
Normal file
|
@ -0,0 +1,62 @@
|
|||
import { Scalar } from '../../nodes/Scalar.js';
|
||||
import { map } from '../common/map.js';
|
||||
import { seq } from '../common/seq.js';
|
||||
|
||||
function intIdentify(value) {
|
||||
return typeof value === 'bigint' || Number.isInteger(value);
|
||||
}
|
||||
const stringifyJSON = ({ value }) => JSON.stringify(value);
|
||||
const jsonScalars = [
|
||||
{
|
||||
identify: value => typeof value === 'string',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:str',
|
||||
resolve: str => str,
|
||||
stringify: stringifyJSON
|
||||
},
|
||||
{
|
||||
identify: value => value == null,
|
||||
createNode: () => new Scalar(null),
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:null',
|
||||
test: /^null$/,
|
||||
resolve: () => null,
|
||||
stringify: stringifyJSON
|
||||
},
|
||||
{
|
||||
identify: value => typeof value === 'boolean',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:bool',
|
||||
test: /^true|false$/,
|
||||
resolve: str => str === 'true',
|
||||
stringify: stringifyJSON
|
||||
},
|
||||
{
|
||||
identify: intIdentify,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
test: /^-?(?:0|[1-9][0-9]*)$/,
|
||||
resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10),
|
||||
stringify: ({ value }) => intIdentify(value) ? value.toString() : JSON.stringify(value)
|
||||
},
|
||||
{
|
||||
identify: value => typeof value === 'number',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:float',
|
||||
test: /^-?(?:0|[1-9][0-9]*)(?:\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/,
|
||||
resolve: str => parseFloat(str),
|
||||
stringify: stringifyJSON
|
||||
}
|
||||
];
|
||||
const jsonError = {
|
||||
default: true,
|
||||
tag: '',
|
||||
test: /^/,
|
||||
resolve(str, onError) {
|
||||
onError(`Unresolved plain scalar ${JSON.stringify(str)}`);
|
||||
return str;
|
||||
}
|
||||
};
|
||||
const schema = [map, seq].concat(jsonScalars, jsonError);
|
||||
|
||||
export { schema };
|
83
node_modules/yaml/browser/dist/schema/tags.js
generated
vendored
Normal file
83
node_modules/yaml/browser/dist/schema/tags.js
generated
vendored
Normal file
|
@ -0,0 +1,83 @@
|
|||
import { map } from './common/map.js';
|
||||
import { nullTag } from './common/null.js';
|
||||
import { seq } from './common/seq.js';
|
||||
import { string } from './common/string.js';
|
||||
import { boolTag } from './core/bool.js';
|
||||
import { float, floatExp, floatNaN } from './core/float.js';
|
||||
import { int, intHex, intOct } from './core/int.js';
|
||||
import { schema } from './core/schema.js';
|
||||
import { schema as schema$1 } from './json/schema.js';
|
||||
import { binary } from './yaml-1.1/binary.js';
|
||||
import { omap } from './yaml-1.1/omap.js';
|
||||
import { pairs } from './yaml-1.1/pairs.js';
|
||||
import { schema as schema$2 } from './yaml-1.1/schema.js';
|
||||
import { set } from './yaml-1.1/set.js';
|
||||
import { floatTime, intTime, timestamp } from './yaml-1.1/timestamp.js';
|
||||
|
||||
const schemas = new Map([
|
||||
['core', schema],
|
||||
['failsafe', [map, seq, string]],
|
||||
['json', schema$1],
|
||||
['yaml11', schema$2],
|
||||
['yaml-1.1', schema$2]
|
||||
]);
|
||||
const tagsByName = {
|
||||
binary,
|
||||
bool: boolTag,
|
||||
float,
|
||||
floatExp,
|
||||
floatNaN,
|
||||
floatTime,
|
||||
int,
|
||||
intHex,
|
||||
intOct,
|
||||
intTime,
|
||||
map,
|
||||
null: nullTag,
|
||||
omap,
|
||||
pairs,
|
||||
seq,
|
||||
set,
|
||||
timestamp
|
||||
};
|
||||
const coreKnownTags = {
|
||||
'tag:yaml.org,2002:binary': binary,
|
||||
'tag:yaml.org,2002:omap': omap,
|
||||
'tag:yaml.org,2002:pairs': pairs,
|
||||
'tag:yaml.org,2002:set': set,
|
||||
'tag:yaml.org,2002:timestamp': timestamp
|
||||
};
|
||||
function getTags(customTags, schemaName) {
|
||||
let tags = schemas.get(schemaName);
|
||||
if (!tags) {
|
||||
if (Array.isArray(customTags))
|
||||
tags = [];
|
||||
else {
|
||||
const keys = Array.from(schemas.keys())
|
||||
.filter(key => key !== 'yaml11')
|
||||
.map(key => JSON.stringify(key))
|
||||
.join(', ');
|
||||
throw new Error(`Unknown schema "${schemaName}"; use one of ${keys} or define customTags array`);
|
||||
}
|
||||
}
|
||||
if (Array.isArray(customTags)) {
|
||||
for (const tag of customTags)
|
||||
tags = tags.concat(tag);
|
||||
}
|
||||
else if (typeof customTags === 'function') {
|
||||
tags = customTags(tags.slice());
|
||||
}
|
||||
return tags.map(tag => {
|
||||
if (typeof tag !== 'string')
|
||||
return tag;
|
||||
const tagObj = tagsByName[tag];
|
||||
if (tagObj)
|
||||
return tagObj;
|
||||
const keys = Object.keys(tagsByName)
|
||||
.map(key => JSON.stringify(key))
|
||||
.join(', ');
|
||||
throw new Error(`Unknown custom tag "${tag}"; use one of ${keys}`);
|
||||
});
|
||||
}
|
||||
|
||||
export { coreKnownTags, getTags };
|
66
node_modules/yaml/browser/dist/schema/yaml-1.1/binary.js
generated
vendored
Normal file
66
node_modules/yaml/browser/dist/schema/yaml-1.1/binary.js
generated
vendored
Normal file
|
@ -0,0 +1,66 @@
|
|||
import { Scalar } from '../../nodes/Scalar.js';
|
||||
import { stringifyString } from '../../stringify/stringifyString.js';
|
||||
|
||||
const binary = {
|
||||
identify: value => value instanceof Uint8Array,
|
||||
default: false,
|
||||
tag: 'tag:yaml.org,2002:binary',
|
||||
/**
|
||||
* Returns a Buffer in node and an Uint8Array in browsers
|
||||
*
|
||||
* To use the resulting buffer as an image, you'll want to do something like:
|
||||
*
|
||||
* const blob = new Blob([buffer], { type: 'image/jpeg' })
|
||||
* document.querySelector('#photo').src = URL.createObjectURL(blob)
|
||||
*/
|
||||
resolve(src, onError) {
|
||||
if (typeof Buffer === 'function') {
|
||||
return Buffer.from(src, 'base64');
|
||||
}
|
||||
else if (typeof atob === 'function') {
|
||||
// On IE 11, atob() can't handle newlines
|
||||
const str = atob(src.replace(/[\n\r]/g, ''));
|
||||
const buffer = new Uint8Array(str.length);
|
||||
for (let i = 0; i < str.length; ++i)
|
||||
buffer[i] = str.charCodeAt(i);
|
||||
return buffer;
|
||||
}
|
||||
else {
|
||||
onError('This environment does not support reading binary tags; either Buffer or atob is required');
|
||||
return src;
|
||||
}
|
||||
},
|
||||
stringify({ comment, type, value }, ctx, onComment, onChompKeep) {
|
||||
const buf = value; // checked earlier by binary.identify()
|
||||
let str;
|
||||
if (typeof Buffer === 'function') {
|
||||
str =
|
||||
buf instanceof Buffer
|
||||
? buf.toString('base64')
|
||||
: Buffer.from(buf.buffer).toString('base64');
|
||||
}
|
||||
else if (typeof btoa === 'function') {
|
||||
let s = '';
|
||||
for (let i = 0; i < buf.length; ++i)
|
||||
s += String.fromCharCode(buf[i]);
|
||||
str = btoa(s);
|
||||
}
|
||||
else {
|
||||
throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required');
|
||||
}
|
||||
if (!type)
|
||||
type = Scalar.BLOCK_LITERAL;
|
||||
if (type !== Scalar.QUOTE_DOUBLE) {
|
||||
const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth);
|
||||
const n = Math.ceil(str.length / lineWidth);
|
||||
const lines = new Array(n);
|
||||
for (let i = 0, o = 0; i < n; ++i, o += lineWidth) {
|
||||
lines[i] = str.substr(o, lineWidth);
|
||||
}
|
||||
str = lines.join(type === Scalar.BLOCK_LITERAL ? '\n' : ' ');
|
||||
}
|
||||
return stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep);
|
||||
}
|
||||
};
|
||||
|
||||
export { binary };
|
26
node_modules/yaml/browser/dist/schema/yaml-1.1/bool.js
generated
vendored
Normal file
26
node_modules/yaml/browser/dist/schema/yaml-1.1/bool.js
generated
vendored
Normal file
|
@ -0,0 +1,26 @@
|
|||
import { Scalar } from '../../nodes/Scalar.js';
|
||||
|
||||
function boolStringify({ value, source }, ctx) {
|
||||
const boolObj = value ? trueTag : falseTag;
|
||||
if (source && boolObj.test.test(source))
|
||||
return source;
|
||||
return value ? ctx.options.trueStr : ctx.options.falseStr;
|
||||
}
|
||||
const trueTag = {
|
||||
identify: value => value === true,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:bool',
|
||||
test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/,
|
||||
resolve: () => new Scalar(true),
|
||||
stringify: boolStringify
|
||||
};
|
||||
const falseTag = {
|
||||
identify: value => value === false,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:bool',
|
||||
test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/i,
|
||||
resolve: () => new Scalar(false),
|
||||
stringify: boolStringify
|
||||
};
|
||||
|
||||
export { falseTag, trueTag };
|
46
node_modules/yaml/browser/dist/schema/yaml-1.1/float.js
generated
vendored
Normal file
46
node_modules/yaml/browser/dist/schema/yaml-1.1/float.js
generated
vendored
Normal file
|
@ -0,0 +1,46 @@
|
|||
import { Scalar } from '../../nodes/Scalar.js';
|
||||
import { stringifyNumber } from '../../stringify/stringifyNumber.js';
|
||||
|
||||
const floatNaN = {
|
||||
identify: value => typeof value === 'number',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:float',
|
||||
test: /^[-+]?\.(?:inf|Inf|INF|nan|NaN|NAN)$/,
|
||||
resolve: (str) => str.slice(-3).toLowerCase() === 'nan'
|
||||
? NaN
|
||||
: str[0] === '-'
|
||||
? Number.NEGATIVE_INFINITY
|
||||
: Number.POSITIVE_INFINITY,
|
||||
stringify: stringifyNumber
|
||||
};
|
||||
const floatExp = {
|
||||
identify: value => typeof value === 'number',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:float',
|
||||
format: 'EXP',
|
||||
test: /^[-+]?(?:[0-9][0-9_]*)?(?:\.[0-9_]*)?[eE][-+]?[0-9]+$/,
|
||||
resolve: (str) => parseFloat(str.replace(/_/g, '')),
|
||||
stringify(node) {
|
||||
const num = Number(node.value);
|
||||
return isFinite(num) ? num.toExponential() : stringifyNumber(node);
|
||||
}
|
||||
};
|
||||
const float = {
|
||||
identify: value => typeof value === 'number',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:float',
|
||||
test: /^[-+]?(?:[0-9][0-9_]*)?\.[0-9_]*$/,
|
||||
resolve(str) {
|
||||
const node = new Scalar(parseFloat(str.replace(/_/g, '')));
|
||||
const dot = str.indexOf('.');
|
||||
if (dot !== -1) {
|
||||
const f = str.substring(dot + 1).replace(/_/g, '');
|
||||
if (f[f.length - 1] === '0')
|
||||
node.minFractionDigits = f.length;
|
||||
}
|
||||
return node;
|
||||
},
|
||||
stringify: stringifyNumber
|
||||
};
|
||||
|
||||
export { float, floatExp, floatNaN };
|
71
node_modules/yaml/browser/dist/schema/yaml-1.1/int.js
generated
vendored
Normal file
71
node_modules/yaml/browser/dist/schema/yaml-1.1/int.js
generated
vendored
Normal file
|
@ -0,0 +1,71 @@
|
|||
import { stringifyNumber } from '../../stringify/stringifyNumber.js';
|
||||
|
||||
const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);
|
||||
function intResolve(str, offset, radix, { intAsBigInt }) {
|
||||
const sign = str[0];
|
||||
if (sign === '-' || sign === '+')
|
||||
offset += 1;
|
||||
str = str.substring(offset).replace(/_/g, '');
|
||||
if (intAsBigInt) {
|
||||
switch (radix) {
|
||||
case 2:
|
||||
str = `0b${str}`;
|
||||
break;
|
||||
case 8:
|
||||
str = `0o${str}`;
|
||||
break;
|
||||
case 16:
|
||||
str = `0x${str}`;
|
||||
break;
|
||||
}
|
||||
const n = BigInt(str);
|
||||
return sign === '-' ? BigInt(-1) * n : n;
|
||||
}
|
||||
const n = parseInt(str, radix);
|
||||
return sign === '-' ? -1 * n : n;
|
||||
}
|
||||
function intStringify(node, radix, prefix) {
|
||||
const { value } = node;
|
||||
if (intIdentify(value)) {
|
||||
const str = value.toString(radix);
|
||||
return value < 0 ? '-' + prefix + str.substr(1) : prefix + str;
|
||||
}
|
||||
return stringifyNumber(node);
|
||||
}
|
||||
const intBin = {
|
||||
identify: intIdentify,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
format: 'BIN',
|
||||
test: /^[-+]?0b[0-1_]+$/,
|
||||
resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt),
|
||||
stringify: node => intStringify(node, 2, '0b')
|
||||
};
|
||||
const intOct = {
|
||||
identify: intIdentify,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
format: 'OCT',
|
||||
test: /^[-+]?0[0-7_]+$/,
|
||||
resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt),
|
||||
stringify: node => intStringify(node, 8, '0')
|
||||
};
|
||||
const int = {
|
||||
identify: intIdentify,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
test: /^[-+]?[0-9][0-9_]*$/,
|
||||
resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),
|
||||
stringify: stringifyNumber
|
||||
};
|
||||
const intHex = {
|
||||
identify: intIdentify,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
format: 'HEX',
|
||||
test: /^[-+]?0x[0-9a-fA-F_]+$/,
|
||||
resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),
|
||||
stringify: node => intStringify(node, 16, '0x')
|
||||
};
|
||||
|
||||
export { int, intBin, intHex, intOct };
|
73
node_modules/yaml/browser/dist/schema/yaml-1.1/omap.js
generated
vendored
Normal file
73
node_modules/yaml/browser/dist/schema/yaml-1.1/omap.js
generated
vendored
Normal file
|
@ -0,0 +1,73 @@
|
|||
import { YAMLSeq } from '../../nodes/YAMLSeq.js';
|
||||
import { toJS } from '../../nodes/toJS.js';
|
||||
import { isScalar, isPair } from '../../nodes/Node.js';
|
||||
import { YAMLMap } from '../../nodes/YAMLMap.js';
|
||||
import { resolvePairs, createPairs } from './pairs.js';
|
||||
|
||||
class YAMLOMap extends YAMLSeq {
|
||||
constructor() {
|
||||
super();
|
||||
this.add = YAMLMap.prototype.add.bind(this);
|
||||
this.delete = YAMLMap.prototype.delete.bind(this);
|
||||
this.get = YAMLMap.prototype.get.bind(this);
|
||||
this.has = YAMLMap.prototype.has.bind(this);
|
||||
this.set = YAMLMap.prototype.set.bind(this);
|
||||
this.tag = YAMLOMap.tag;
|
||||
}
|
||||
/**
|
||||
* If `ctx` is given, the return type is actually `Map<unknown, unknown>`,
|
||||
* but TypeScript won't allow widening the signature of a child method.
|
||||
*/
|
||||
toJSON(_, ctx) {
|
||||
if (!ctx)
|
||||
return super.toJSON(_);
|
||||
const map = new Map();
|
||||
if (ctx?.onCreate)
|
||||
ctx.onCreate(map);
|
||||
for (const pair of this.items) {
|
||||
let key, value;
|
||||
if (isPair(pair)) {
|
||||
key = toJS(pair.key, '', ctx);
|
||||
value = toJS(pair.value, key, ctx);
|
||||
}
|
||||
else {
|
||||
key = toJS(pair, '', ctx);
|
||||
}
|
||||
if (map.has(key))
|
||||
throw new Error('Ordered maps must not include duplicate keys');
|
||||
map.set(key, value);
|
||||
}
|
||||
return map;
|
||||
}
|
||||
}
|
||||
YAMLOMap.tag = 'tag:yaml.org,2002:omap';
|
||||
const omap = {
|
||||
collection: 'seq',
|
||||
identify: value => value instanceof Map,
|
||||
nodeClass: YAMLOMap,
|
||||
default: false,
|
||||
tag: 'tag:yaml.org,2002:omap',
|
||||
resolve(seq, onError) {
|
||||
const pairs = resolvePairs(seq, onError);
|
||||
const seenKeys = [];
|
||||
for (const { key } of pairs.items) {
|
||||
if (isScalar(key)) {
|
||||
if (seenKeys.includes(key.value)) {
|
||||
onError(`Ordered maps must not include duplicate keys: ${key.value}`);
|
||||
}
|
||||
else {
|
||||
seenKeys.push(key.value);
|
||||
}
|
||||
}
|
||||
}
|
||||
return Object.assign(new YAMLOMap(), pairs);
|
||||
},
|
||||
createNode(schema, iterable, ctx) {
|
||||
const pairs = createPairs(schema, iterable, ctx);
|
||||
const omap = new YAMLOMap();
|
||||
omap.items = pairs.items;
|
||||
return omap;
|
||||
}
|
||||
};
|
||||
|
||||
export { YAMLOMap, omap };
|
77
node_modules/yaml/browser/dist/schema/yaml-1.1/pairs.js
generated
vendored
Normal file
77
node_modules/yaml/browser/dist/schema/yaml-1.1/pairs.js
generated
vendored
Normal file
|
@ -0,0 +1,77 @@
|
|||
import { isSeq, isPair, isMap } from '../../nodes/Node.js';
|
||||
import { Pair, createPair } from '../../nodes/Pair.js';
|
||||
import { Scalar } from '../../nodes/Scalar.js';
|
||||
import { YAMLSeq } from '../../nodes/YAMLSeq.js';
|
||||
|
||||
function resolvePairs(seq, onError) {
|
||||
if (isSeq(seq)) {
|
||||
for (let i = 0; i < seq.items.length; ++i) {
|
||||
let item = seq.items[i];
|
||||
if (isPair(item))
|
||||
continue;
|
||||
else if (isMap(item)) {
|
||||
if (item.items.length > 1)
|
||||
onError('Each pair must have its own sequence indicator');
|
||||
const pair = item.items[0] || new Pair(new Scalar(null));
|
||||
if (item.commentBefore)
|
||||
pair.key.commentBefore = pair.key.commentBefore
|
||||
? `${item.commentBefore}\n${pair.key.commentBefore}`
|
||||
: item.commentBefore;
|
||||
if (item.comment) {
|
||||
const cn = pair.value ?? pair.key;
|
||||
cn.comment = cn.comment
|
||||
? `${item.comment}\n${cn.comment}`
|
||||
: item.comment;
|
||||
}
|
||||
item = pair;
|
||||
}
|
||||
seq.items[i] = isPair(item) ? item : new Pair(item);
|
||||
}
|
||||
}
|
||||
else
|
||||
onError('Expected a sequence for this tag');
|
||||
return seq;
|
||||
}
|
||||
function createPairs(schema, iterable, ctx) {
|
||||
const { replacer } = ctx;
|
||||
const pairs = new YAMLSeq(schema);
|
||||
pairs.tag = 'tag:yaml.org,2002:pairs';
|
||||
let i = 0;
|
||||
if (iterable && Symbol.iterator in Object(iterable))
|
||||
for (let it of iterable) {
|
||||
if (typeof replacer === 'function')
|
||||
it = replacer.call(iterable, String(i++), it);
|
||||
let key, value;
|
||||
if (Array.isArray(it)) {
|
||||
if (it.length === 2) {
|
||||
key = it[0];
|
||||
value = it[1];
|
||||
}
|
||||
else
|
||||
throw new TypeError(`Expected [key, value] tuple: ${it}`);
|
||||
}
|
||||
else if (it && it instanceof Object) {
|
||||
const keys = Object.keys(it);
|
||||
if (keys.length === 1) {
|
||||
key = keys[0];
|
||||
value = it[key];
|
||||
}
|
||||
else
|
||||
throw new TypeError(`Expected { key: value } tuple: ${it}`);
|
||||
}
|
||||
else {
|
||||
key = it;
|
||||
}
|
||||
pairs.items.push(createPair(key, value, ctx));
|
||||
}
|
||||
return pairs;
|
||||
}
|
||||
const pairs = {
|
||||
collection: 'seq',
|
||||
default: false,
|
||||
tag: 'tag:yaml.org,2002:pairs',
|
||||
resolve: resolvePairs,
|
||||
createNode: createPairs
|
||||
};
|
||||
|
||||
export { createPairs, pairs, resolvePairs };
|
37
node_modules/yaml/browser/dist/schema/yaml-1.1/schema.js
generated
vendored
Normal file
37
node_modules/yaml/browser/dist/schema/yaml-1.1/schema.js
generated
vendored
Normal file
|
@ -0,0 +1,37 @@
|
|||
import { map } from '../common/map.js';
|
||||
import { nullTag } from '../common/null.js';
|
||||
import { seq } from '../common/seq.js';
|
||||
import { string } from '../common/string.js';
|
||||
import { binary } from './binary.js';
|
||||
import { trueTag, falseTag } from './bool.js';
|
||||
import { floatNaN, floatExp, float } from './float.js';
|
||||
import { intBin, intOct, int, intHex } from './int.js';
|
||||
import { omap } from './omap.js';
|
||||
import { pairs } from './pairs.js';
|
||||
import { set } from './set.js';
|
||||
import { intTime, floatTime, timestamp } from './timestamp.js';
|
||||
|
||||
const schema = [
|
||||
map,
|
||||
seq,
|
||||
string,
|
||||
nullTag,
|
||||
trueTag,
|
||||
falseTag,
|
||||
intBin,
|
||||
intOct,
|
||||
int,
|
||||
intHex,
|
||||
floatNaN,
|
||||
floatExp,
|
||||
float,
|
||||
binary,
|
||||
omap,
|
||||
pairs,
|
||||
set,
|
||||
intTime,
|
||||
floatTime,
|
||||
timestamp
|
||||
];
|
||||
|
||||
export { schema };
|
92
node_modules/yaml/browser/dist/schema/yaml-1.1/set.js
generated
vendored
Normal file
92
node_modules/yaml/browser/dist/schema/yaml-1.1/set.js
generated
vendored
Normal file
|
@ -0,0 +1,92 @@
|
|||
import { isMap, isPair, isScalar } from '../../nodes/Node.js';
|
||||
import { createPair, Pair } from '../../nodes/Pair.js';
|
||||
import { YAMLMap, findPair } from '../../nodes/YAMLMap.js';
|
||||
|
||||
class YAMLSet extends YAMLMap {
|
||||
constructor(schema) {
|
||||
super(schema);
|
||||
this.tag = YAMLSet.tag;
|
||||
}
|
||||
add(key) {
|
||||
let pair;
|
||||
if (isPair(key))
|
||||
pair = key;
|
||||
else if (key &&
|
||||
typeof key === 'object' &&
|
||||
'key' in key &&
|
||||
'value' in key &&
|
||||
key.value === null)
|
||||
pair = new Pair(key.key, null);
|
||||
else
|
||||
pair = new Pair(key, null);
|
||||
const prev = findPair(this.items, pair.key);
|
||||
if (!prev)
|
||||
this.items.push(pair);
|
||||
}
|
||||
/**
|
||||
* If `keepPair` is `true`, returns the Pair matching `key`.
|
||||
* Otherwise, returns the value of that Pair's key.
|
||||
*/
|
||||
get(key, keepPair) {
|
||||
const pair = findPair(this.items, key);
|
||||
return !keepPair && isPair(pair)
|
||||
? isScalar(pair.key)
|
||||
? pair.key.value
|
||||
: pair.key
|
||||
: pair;
|
||||
}
|
||||
set(key, value) {
|
||||
if (typeof value !== 'boolean')
|
||||
throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`);
|
||||
const prev = findPair(this.items, key);
|
||||
if (prev && !value) {
|
||||
this.items.splice(this.items.indexOf(prev), 1);
|
||||
}
|
||||
else if (!prev && value) {
|
||||
this.items.push(new Pair(key));
|
||||
}
|
||||
}
|
||||
toJSON(_, ctx) {
|
||||
return super.toJSON(_, ctx, Set);
|
||||
}
|
||||
toString(ctx, onComment, onChompKeep) {
|
||||
if (!ctx)
|
||||
return JSON.stringify(this);
|
||||
if (this.hasAllNullValues(true))
|
||||
return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep);
|
||||
else
|
||||
throw new Error('Set items must all have null values');
|
||||
}
|
||||
}
|
||||
YAMLSet.tag = 'tag:yaml.org,2002:set';
|
||||
const set = {
|
||||
collection: 'map',
|
||||
identify: value => value instanceof Set,
|
||||
nodeClass: YAMLSet,
|
||||
default: false,
|
||||
tag: 'tag:yaml.org,2002:set',
|
||||
resolve(map, onError) {
|
||||
if (isMap(map)) {
|
||||
if (map.hasAllNullValues(true))
|
||||
return Object.assign(new YAMLSet(), map);
|
||||
else
|
||||
onError('Set items must all have null values');
|
||||
}
|
||||
else
|
||||
onError('Expected a mapping for this tag');
|
||||
return map;
|
||||
},
|
||||
createNode(schema, iterable, ctx) {
|
||||
const { replacer } = ctx;
|
||||
const set = new YAMLSet(schema);
|
||||
if (iterable && Symbol.iterator in Object(iterable))
|
||||
for (let value of iterable) {
|
||||
if (typeof replacer === 'function')
|
||||
value = replacer.call(iterable, value, value);
|
||||
set.items.push(createPair(value, null, ctx));
|
||||
}
|
||||
return set;
|
||||
}
|
||||
};
|
||||
|
||||
export { YAMLSet, set };
|
101
node_modules/yaml/browser/dist/schema/yaml-1.1/timestamp.js
generated
vendored
Normal file
101
node_modules/yaml/browser/dist/schema/yaml-1.1/timestamp.js
generated
vendored
Normal file
|
@ -0,0 +1,101 @@
|
|||
import { stringifyNumber } from '../../stringify/stringifyNumber.js';
|
||||
|
||||
/** Internal types handle bigint as number, because TS can't figure it out. */
|
||||
function parseSexagesimal(str, asBigInt) {
|
||||
const sign = str[0];
|
||||
const parts = sign === '-' || sign === '+' ? str.substring(1) : str;
|
||||
const num = (n) => asBigInt ? BigInt(n) : Number(n);
|
||||
const res = parts
|
||||
.replace(/_/g, '')
|
||||
.split(':')
|
||||
.reduce((res, p) => res * num(60) + num(p), num(0));
|
||||
return (sign === '-' ? num(-1) * res : res);
|
||||
}
|
||||
/**
|
||||
* hhhh:mm:ss.sss
|
||||
*
|
||||
* Internal types handle bigint as number, because TS can't figure it out.
|
||||
*/
|
||||
function stringifySexagesimal(node) {
|
||||
let { value } = node;
|
||||
let num = (n) => n;
|
||||
if (typeof value === 'bigint')
|
||||
num = n => BigInt(n);
|
||||
else if (isNaN(value) || !isFinite(value))
|
||||
return stringifyNumber(node);
|
||||
let sign = '';
|
||||
if (value < 0) {
|
||||
sign = '-';
|
||||
value *= num(-1);
|
||||
}
|
||||
const _60 = num(60);
|
||||
const parts = [value % _60]; // seconds, including ms
|
||||
if (value < 60) {
|
||||
parts.unshift(0); // at least one : is required
|
||||
}
|
||||
else {
|
||||
value = (value - parts[0]) / _60;
|
||||
parts.unshift(value % _60); // minutes
|
||||
if (value >= 60) {
|
||||
value = (value - parts[0]) / _60;
|
||||
parts.unshift(value); // hours
|
||||
}
|
||||
}
|
||||
return (sign +
|
||||
parts
|
||||
.map(n => (n < 10 ? '0' + String(n) : String(n)))
|
||||
.join(':')
|
||||
.replace(/000000\d*$/, '') // % 60 may introduce error
|
||||
);
|
||||
}
|
||||
const intTime = {
|
||||
identify: value => typeof value === 'bigint' || Number.isInteger(value),
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:int',
|
||||
format: 'TIME',
|
||||
test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/,
|
||||
resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt),
|
||||
stringify: stringifySexagesimal
|
||||
};
|
||||
const floatTime = {
|
||||
identify: value => typeof value === 'number',
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:float',
|
||||
format: 'TIME',
|
||||
test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*$/,
|
||||
resolve: str => parseSexagesimal(str, false),
|
||||
stringify: stringifySexagesimal
|
||||
};
|
||||
const timestamp = {
|
||||
identify: value => value instanceof Date,
|
||||
default: true,
|
||||
tag: 'tag:yaml.org,2002:timestamp',
|
||||
// If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part
|
||||
// may be omitted altogether, resulting in a date format. In such a case, the time part is
|
||||
// assumed to be 00:00:00Z (start of day, UTC).
|
||||
test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd
|
||||
'(?:' + // time is optional
|
||||
'(?:t|T|[ \\t]+)' + // t | T | whitespace
|
||||
'([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)?
|
||||
'(?:[ \\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30
|
||||
')?$'),
|
||||
resolve(str) {
|
||||
const match = str.match(timestamp.test);
|
||||
if (!match)
|
||||
throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd');
|
||||
const [, year, month, day, hour, minute, second] = match.map(Number);
|
||||
const millisec = match[7] ? Number((match[7] + '00').substr(1, 3)) : 0;
|
||||
let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec);
|
||||
const tz = match[8];
|
||||
if (tz && tz !== 'Z') {
|
||||
let d = parseSexagesimal(tz, false);
|
||||
if (Math.abs(d) < 30)
|
||||
d *= 60;
|
||||
date -= 60000 * d;
|
||||
}
|
||||
return new Date(date);
|
||||
},
|
||||
stringify: ({ value }) => value.toISOString().replace(/((T00:00)?:00)?\.000Z$/, '')
|
||||
};
|
||||
|
||||
export { floatTime, intTime, timestamp };

135
node_modules/yaml/browser/dist/stringify/foldFlowLines.js
generated
vendored
Normal file
@@ -0,0 +1,135 @@
const FOLD_FLOW = 'flow';
const FOLD_BLOCK = 'block';
const FOLD_QUOTED = 'quoted';
/**
 * Tries to keep input at up to `lineWidth` characters, splitting only on spaces
 * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are
 * terminated with `\n` and started with `indent`.
 */
function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) {
    if (!lineWidth || lineWidth < 0)
        return text;
    const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length);
    if (text.length <= endStep)
        return text;
    const folds = [];
    const escapedFolds = {};
    let end = lineWidth - indent.length;
    if (typeof indentAtStart === 'number') {
        if (indentAtStart > lineWidth - Math.max(2, minContentWidth))
            folds.push(0);
        else
            end = lineWidth - indentAtStart;
    }
    let split = undefined;
    let prev = undefined;
    let overflow = false;
    let i = -1;
    let escStart = -1;
    let escEnd = -1;
    if (mode === FOLD_BLOCK) {
        i = consumeMoreIndentedLines(text, i);
        if (i !== -1)
            end = i + endStep;
    }
    for (let ch; (ch = text[(i += 1)]);) {
        if (mode === FOLD_QUOTED && ch === '\\') {
            escStart = i;
            switch (text[i + 1]) {
                case 'x':
                    i += 3;
                    break;
                case 'u':
                    i += 5;
                    break;
                case 'U':
                    i += 9;
                    break;
                default:
                    i += 1;
            }
            escEnd = i;
        }
        if (ch === '\n') {
            if (mode === FOLD_BLOCK)
                i = consumeMoreIndentedLines(text, i);
            end = i + endStep;
            split = undefined;
        }
        else {
            if (ch === ' ' &&
                prev &&
                prev !== ' ' &&
                prev !== '\n' &&
                prev !== '\t') {
                // space surrounded by non-space can be replaced with newline + indent
                const next = text[i + 1];
                if (next && next !== ' ' && next !== '\n' && next !== '\t')
                    split = i;
            }
            if (i >= end) {
                if (split) {
                    folds.push(split);
                    end = split + endStep;
                    split = undefined;
                }
                else if (mode === FOLD_QUOTED) {
                    // white-space collected at end may stretch past lineWidth
                    while (prev === ' ' || prev === '\t') {
                        prev = ch;
                        ch = text[(i += 1)];
                        overflow = true;
                    }
                    // Account for newline escape, but don't break preceding escape
                    const j = i > escEnd + 1 ? i - 2 : escStart - 1;
                    // Bail out if lineWidth & minContentWidth are shorter than an escape string
                    if (escapedFolds[j])
                        return text;
                    folds.push(j);
                    escapedFolds[j] = true;
                    end = j + endStep;
                    split = undefined;
                }
                else {
                    overflow = true;
                }
            }
        }
        prev = ch;
    }
    if (overflow && onOverflow)
        onOverflow();
    if (folds.length === 0)
        return text;
    if (onFold)
        onFold();
    let res = text.slice(0, folds[0]);
    for (let i = 0; i < folds.length; ++i) {
        const fold = folds[i];
        const end = folds[i + 1] || text.length;
        if (fold === 0)
            res = `\n${indent}${text.slice(0, end)}`;
        else {
            if (mode === FOLD_QUOTED && escapedFolds[fold])
                res += `${text[fold]}\\`;
            res += `\n${indent}${text.slice(fold + 1, end)}`;
        }
    }
    return res;
}
/**
 * Presumes `i + 1` is at the start of a line
 * @returns index of last newline in more-indented block
 */
function consumeMoreIndentedLines(text, i) {
    let ch = text[i + 1];
    while (ch === ' ' || ch === '\t') {
        do {
            ch = text[(i += 1)];
        } while (ch && ch !== '\n');
        ch = text[i + 1];
    }
    return i;
}

export { FOLD_BLOCK, FOLD_FLOW, FOLD_QUOTED, foldFlowLines };
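
A minimal sketch of calling this helper directly, based on the signature above (the sample text and options are made up; in the published package the function is re-exported from 'yaml/util'):

import { foldFlowLines, FOLD_FLOW } from 'yaml/util';

const folded = foldFlowLines(
    'a fairly long plain scalar value that will not fit on a single output line',
    '  ',                                   // indent prepended after every fold
    FOLD_FLOW,                              // fold on single spaces between words
    { lineWidth: 40, minContentWidth: 20 }  // keep lines near 40 columns
);
// folded is the same text with some spaces replaced by '\n  '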

124
node_modules/yaml/browser/dist/stringify/stringify.js
generated
vendored
Normal file
@@ -0,0 +1,124 @@
import { anchorIsValid } from '../doc/anchors.js';
import { isPair, isAlias, isNode, isScalar, isCollection } from '../nodes/Node.js';
import { stringifyComment } from './stringifyComment.js';
import { stringifyString } from './stringifyString.js';

function createStringifyContext(doc, options) {
    const opt = Object.assign({
        blockQuote: true,
        commentString: stringifyComment,
        defaultKeyType: null,
        defaultStringType: 'PLAIN',
        directives: null,
        doubleQuotedAsJSON: false,
        doubleQuotedMinMultiLineLength: 40,
        falseStr: 'false',
        flowCollectionPadding: true,
        indentSeq: true,
        lineWidth: 80,
        minContentWidth: 20,
        nullStr: 'null',
        simpleKeys: false,
        singleQuote: null,
        trueStr: 'true',
        verifyAliasOrder: true
    }, doc.schema.toStringOptions, options);
    let inFlow;
    switch (opt.collectionStyle) {
        case 'block':
            inFlow = false;
            break;
        case 'flow':
            inFlow = true;
            break;
        default:
            inFlow = null;
    }
    return {
        anchors: new Set(),
        doc,
        flowCollectionPadding: opt.flowCollectionPadding ? ' ' : '',
        indent: '',
        indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : '  ',
        inFlow,
        options: opt
    };
}
function getTagObject(tags, item) {
    if (item.tag) {
        const match = tags.filter(t => t.tag === item.tag);
        if (match.length > 0)
            return match.find(t => t.format === item.format) ?? match[0];
    }
    let tagObj = undefined;
    let obj;
    if (isScalar(item)) {
        obj = item.value;
        const match = tags.filter(t => t.identify?.(obj));
        tagObj =
            match.find(t => t.format === item.format) ?? match.find(t => !t.format);
    }
    else {
        obj = item;
        tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass);
    }
    if (!tagObj) {
        const name = obj?.constructor?.name ?? typeof obj;
        throw new Error(`Tag not resolved for ${name} value`);
    }
    return tagObj;
}
// needs to be called before value stringifier to allow for circular anchor refs
function stringifyProps(node, tagObj, { anchors, doc }) {
    if (!doc.directives)
        return '';
    const props = [];
    const anchor = (isScalar(node) || isCollection(node)) && node.anchor;
    if (anchor && anchorIsValid(anchor)) {
        anchors.add(anchor);
        props.push(`&${anchor}`);
    }
    const tag = node.tag ? node.tag : tagObj.default ? null : tagObj.tag;
    if (tag)
        props.push(doc.directives.tagString(tag));
    return props.join(' ');
}
function stringify(item, ctx, onComment, onChompKeep) {
    if (isPair(item))
        return item.toString(ctx, onComment, onChompKeep);
    if (isAlias(item)) {
        if (ctx.doc.directives)
            return item.toString(ctx);
        if (ctx.resolvedAliases?.has(item)) {
            throw new TypeError(`Cannot stringify circular structure without alias nodes`);
        }
        else {
            if (ctx.resolvedAliases)
                ctx.resolvedAliases.add(item);
            else
                ctx.resolvedAliases = new Set([item]);
            item = item.resolve(ctx.doc);
        }
    }
    let tagObj = undefined;
    const node = isNode(item)
        ? item
        : ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) });
    if (!tagObj)
        tagObj = getTagObject(ctx.doc.schema.tags, node);
    const props = stringifyProps(node, tagObj, ctx);
    if (props.length > 0)
        ctx.indentAtStart = (ctx.indentAtStart ?? 0) + props.length + 1;
    const str = typeof tagObj.stringify === 'function'
        ? tagObj.stringify(node, ctx, onComment, onChompKeep)
        : isScalar(node)
            ? stringifyString(node, ctx, onComment, onChompKeep)
            : node.toString(ctx, onComment, onChompKeep);
    if (!props)
        return str;
    return isScalar(node) || str[0] === '{' || str[0] === '['
        ? `${props} ${str}`
        : `${props}\n${ctx.indent}${str}`;
}

export { createStringifyContext, stringify };
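
These two functions are the internal plumbing behind Document.toString(); a hedged sketch of how they fit together (the relative import is only meaningful inside this module tree, and the exact output shape may vary):

import { Document } from 'yaml';
// from within this directory: import { createStringifyContext, stringify } from './stringify.js';

const doc = new Document({ hello: 'world' });
const ctx = createStringifyContext(doc, { indent: 4 });  // 4-space indentStep
stringify(doc.contents, ctx);                            // roughly 'hello: world'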

151
node_modules/yaml/browser/dist/stringify/stringifyCollection.js
generated
vendored
Normal file
@@ -0,0 +1,151 @@
import { Collection } from '../nodes/Collection.js';
import { isNode, isPair } from '../nodes/Node.js';
import { stringify } from './stringify.js';
import { lineComment, indentComment } from './stringifyComment.js';

function stringifyCollection(collection, ctx, options) {
    const flow = ctx.inFlow ?? collection.flow;
    const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection;
    return stringify(collection, ctx, options);
}
function stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) {
    const { indent, options: { commentString } } = ctx;
    const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null });
    let chompKeep = false; // flag for the preceding node's status
    const lines = [];
    for (let i = 0; i < items.length; ++i) {
        const item = items[i];
        let comment = null;
        if (isNode(item)) {
            if (!chompKeep && item.spaceBefore)
                lines.push('');
            addCommentBefore(ctx, lines, item.commentBefore, chompKeep);
            if (item.comment)
                comment = item.comment;
        }
        else if (isPair(item)) {
            const ik = isNode(item.key) ? item.key : null;
            if (ik) {
                if (!chompKeep && ik.spaceBefore)
                    lines.push('');
                addCommentBefore(ctx, lines, ik.commentBefore, chompKeep);
            }
        }
        chompKeep = false;
        let str = stringify(item, itemCtx, () => (comment = null), () => (chompKeep = true));
        if (comment)
            str += lineComment(str, itemIndent, commentString(comment));
        if (chompKeep && comment)
            chompKeep = false;
        lines.push(blockItemPrefix + str);
    }
    let str;
    if (lines.length === 0) {
        str = flowChars.start + flowChars.end;
    }
    else {
        str = lines[0];
        for (let i = 1; i < lines.length; ++i) {
            const line = lines[i];
            str += line ? `\n${indent}${line}` : '\n';
        }
    }
    if (comment) {
        str += '\n' + indentComment(commentString(comment), indent);
        if (onComment)
            onComment();
    }
    else if (chompKeep && onChompKeep)
        onChompKeep();
    return str;
}
function stringifyFlowCollection({ comment, items }, ctx, { flowChars, itemIndent, onComment }) {
    const { indent, indentStep, flowCollectionPadding: fcPadding, options: { commentString } } = ctx;
    itemIndent += indentStep;
    const itemCtx = Object.assign({}, ctx, {
        indent: itemIndent,
        inFlow: true,
        type: null
    });
    let reqNewline = false;
    let linesAtValue = 0;
    const lines = [];
    for (let i = 0; i < items.length; ++i) {
        const item = items[i];
        let comment = null;
        if (isNode(item)) {
            if (item.spaceBefore)
                lines.push('');
            addCommentBefore(ctx, lines, item.commentBefore, false);
            if (item.comment)
                comment = item.comment;
        }
        else if (isPair(item)) {
            const ik = isNode(item.key) ? item.key : null;
            if (ik) {
                if (ik.spaceBefore)
                    lines.push('');
                addCommentBefore(ctx, lines, ik.commentBefore, false);
                if (ik.comment)
                    reqNewline = true;
            }
            const iv = isNode(item.value) ? item.value : null;
            if (iv) {
                if (iv.comment)
                    comment = iv.comment;
                if (iv.commentBefore)
                    reqNewline = true;
            }
            else if (item.value == null && ik && ik.comment) {
                comment = ik.comment;
            }
        }
        if (comment)
            reqNewline = true;
        let str = stringify(item, itemCtx, () => (comment = null));
        if (i < items.length - 1)
            str += ',';
        if (comment)
            str += lineComment(str, itemIndent, commentString(comment));
        if (!reqNewline && (lines.length > linesAtValue || str.includes('\n')))
            reqNewline = true;
        lines.push(str);
        linesAtValue = lines.length;
    }
    let str;
    const { start, end } = flowChars;
    if (lines.length === 0) {
        str = start + end;
    }
    else {
        if (!reqNewline) {
            const len = lines.reduce((sum, line) => sum + line.length + 2, 2);
            reqNewline = len > Collection.maxFlowStringSingleLineLength;
        }
        if (reqNewline) {
            str = start;
            for (const line of lines)
                str += line ? `\n${indentStep}${indent}${line}` : '\n';
            str += `\n${indent}${end}`;
        }
        else {
            str = `${start}${fcPadding}${lines.join(' ')}${fcPadding}${end}`;
        }
    }
    if (comment) {
        str += lineComment(str, commentString(comment), indent);
        if (onComment)
            onComment();
    }
    return str;
}
function addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) {
    if (comment && chompKeep)
        comment = comment.replace(/^\n+/, '');
    if (comment) {
        const ic = indentComment(commentString(comment), indent);
        lines.push(ic.trimStart()); // Avoid double indent on first line
    }
}

export { stringifyCollection };

20
node_modules/yaml/browser/dist/stringify/stringifyComment.js
generated
vendored
Normal file
@@ -0,0 +1,20 @@
/**
 * Stringifies a comment.
 *
 * Empty comment lines are left empty,
 * lines consisting of a single space are replaced by `#`,
 * and all other lines are prefixed with a `#`.
 */
const stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#');
function indentComment(comment, indent) {
    if (/^\n+$/.test(comment))
        return comment.substring(1);
    return indent ? comment.replace(/^(?! *$)/gm, indent) : comment;
}
const lineComment = (str, indent, comment) => str.endsWith('\n')
    ? indentComment(comment, indent)
    : comment.includes('\n')
        ? '\n' + indentComment(comment, indent)
        : (str.endsWith(' ') ? '' : ' ') + comment;

export { indentComment, lineComment, stringifyComment };
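
A few concrete inputs and outputs, derived directly from the regexes above:

stringifyComment('first\n\n last');     // '#first\n\n# last'
stringifyComment(' ');                  // '#' (a lone space collapses to a bare '#')
indentComment('#a\n#b', '  ');          // '  #a\n  #b'
lineComment('key: value', '  ', '#c');  // ' #c' (for the caller to append on the same line)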

85
node_modules/yaml/browser/dist/stringify/stringifyDocument.js
generated
vendored
Normal file
@@ -0,0 +1,85 @@
import { isNode } from '../nodes/Node.js';
import { createStringifyContext, stringify } from './stringify.js';
import { indentComment, lineComment } from './stringifyComment.js';

function stringifyDocument(doc, options) {
    const lines = [];
    let hasDirectives = options.directives === true;
    if (options.directives !== false && doc.directives) {
        const dir = doc.directives.toString(doc);
        if (dir) {
            lines.push(dir);
            hasDirectives = true;
        }
        else if (doc.directives.docStart)
            hasDirectives = true;
    }
    if (hasDirectives)
        lines.push('---');
    const ctx = createStringifyContext(doc, options);
    const { commentString } = ctx.options;
    if (doc.commentBefore) {
        if (lines.length !== 1)
            lines.unshift('');
        const cs = commentString(doc.commentBefore);
        lines.unshift(indentComment(cs, ''));
    }
    let chompKeep = false;
    let contentComment = null;
    if (doc.contents) {
        if (isNode(doc.contents)) {
            if (doc.contents.spaceBefore && hasDirectives)
                lines.push('');
            if (doc.contents.commentBefore) {
                const cs = commentString(doc.contents.commentBefore);
                lines.push(indentComment(cs, ''));
            }
            // top-level block scalars need to be indented if followed by a comment
            ctx.forceBlockIndent = !!doc.comment;
            contentComment = doc.contents.comment;
        }
        const onChompKeep = contentComment ? undefined : () => (chompKeep = true);
        let body = stringify(doc.contents, ctx, () => (contentComment = null), onChompKeep);
        if (contentComment)
            body += lineComment(body, '', commentString(contentComment));
        if ((body[0] === '|' || body[0] === '>') &&
            lines[lines.length - 1] === '---') {
            // Top-level block scalars with a preceding doc marker ought to use the
            // same line for their header.
            lines[lines.length - 1] = `--- ${body}`;
        }
        else
            lines.push(body);
    }
    else {
        lines.push(stringify(doc.contents, ctx));
    }
    if (doc.directives?.docEnd) {
        if (doc.comment) {
            const cs = commentString(doc.comment);
            if (cs.includes('\n')) {
                lines.push('...');
                lines.push(indentComment(cs, ''));
            }
            else {
                lines.push(`... ${cs}`);
            }
        }
        else {
            lines.push('...');
        }
    }
    else {
        let dc = doc.comment;
        if (dc && chompKeep)
            dc = dc.replace(/^\n+/, '');
        if (dc) {
            if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '')
                lines.push('');
            lines.push(indentComment(commentString(dc), ''));
        }
    }
    return lines.join('\n') + '\n';
}

export { stringifyDocument };

24
node_modules/yaml/browser/dist/stringify/stringifyNumber.js
generated
vendored
Normal file
@@ -0,0 +1,24 @@
function stringifyNumber({ format, minFractionDigits, tag, value }) {
    if (typeof value === 'bigint')
        return String(value);
    const num = typeof value === 'number' ? value : Number(value);
    if (!isFinite(num))
        return isNaN(num) ? '.nan' : num < 0 ? '-.inf' : '.inf';
    let n = JSON.stringify(value);
    if (!format &&
        minFractionDigits &&
        (!tag || tag === 'tag:yaml.org,2002:float') &&
        /^\d/.test(n)) {
        let i = n.indexOf('.');
        if (i < 0) {
            i = n.length;
            n += '.';
        }
        let d = minFractionDigits - (n.length - i - 1);
        while (d-- > 0)
            n += '0';
    }
    return n;
}

export { stringifyNumber };
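
A few cases, following the branches above:

stringifyNumber({ value: 123n });                     // '123'
stringifyNumber({ value: NaN });                      // '.nan'
stringifyNumber({ value: -Infinity });                // '-.inf'
stringifyNumber({ value: 3, minFractionDigits: 2 });  // '3.00'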

150
node_modules/yaml/browser/dist/stringify/stringifyPair.js
generated
vendored
Normal file
@@ -0,0 +1,150 @@
import { isCollection, isNode, isScalar, isSeq } from '../nodes/Node.js';
import { Scalar } from '../nodes/Scalar.js';
import { stringify } from './stringify.js';
import { lineComment, indentComment } from './stringifyComment.js';

function stringifyPair({ key, value }, ctx, onComment, onChompKeep) {
    const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx;
    let keyComment = (isNode(key) && key.comment) || null;
    if (simpleKeys) {
        if (keyComment) {
            throw new Error('With simple keys, key nodes cannot have comments');
        }
        if (isCollection(key)) {
            const msg = 'With simple keys, collection cannot be used as a key value';
            throw new Error(msg);
        }
    }
    let explicitKey = !simpleKeys &&
        (!key ||
            (keyComment && value == null && !ctx.inFlow) ||
            isCollection(key) ||
            (isScalar(key)
                ? key.type === Scalar.BLOCK_FOLDED || key.type === Scalar.BLOCK_LITERAL
                : typeof key === 'object'));
    ctx = Object.assign({}, ctx, {
        allNullValues: false,
        implicitKey: !explicitKey && (simpleKeys || !allNullValues),
        indent: indent + indentStep
    });
    let keyCommentDone = false;
    let chompKeep = false;
    let str = stringify(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true));
    if (!explicitKey && !ctx.inFlow && str.length > 1024) {
        if (simpleKeys)
            throw new Error('With simple keys, single line scalar must not span more than 1024 characters');
        explicitKey = true;
    }
    if (ctx.inFlow) {
        if (allNullValues || value == null) {
            if (keyCommentDone && onComment)
                onComment();
            return str === '' ? '?' : explicitKey ? `? ${str}` : str;
        }
    }
    else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) {
        str = `? ${str}`;
        if (keyComment && !keyCommentDone) {
            str += lineComment(str, ctx.indent, commentString(keyComment));
        }
        else if (chompKeep && onChompKeep)
            onChompKeep();
        return str;
    }
    if (keyCommentDone)
        keyComment = null;
    if (explicitKey) {
        if (keyComment)
            str += lineComment(str, ctx.indent, commentString(keyComment));
        str = `? ${str}\n${indent}:`;
    }
    else {
        str = `${str}:`;
        if (keyComment)
            str += lineComment(str, ctx.indent, commentString(keyComment));
    }
    let vsb, vcb, valueComment;
    if (isNode(value)) {
        vsb = !!value.spaceBefore;
        vcb = value.commentBefore;
        valueComment = value.comment;
    }
    else {
        vsb = false;
        vcb = null;
        valueComment = null;
        if (value && typeof value === 'object')
            value = doc.createNode(value);
    }
    ctx.implicitKey = false;
    if (!explicitKey && !keyComment && isScalar(value))
        ctx.indentAtStart = str.length + 1;
    chompKeep = false;
    if (!indentSeq &&
        indentStep.length >= 2 &&
        !ctx.inFlow &&
        !explicitKey &&
        isSeq(value) &&
        !value.flow &&
        !value.tag &&
        !value.anchor) {
        // If indentSeq === false, consider '- ' as part of indentation where possible
        ctx.indent = ctx.indent.substring(2);
    }
    let valueCommentDone = false;
    const valueStr = stringify(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true));
    let ws = ' ';
    if (keyComment || vsb || vcb) {
        ws = vsb ? '\n' : '';
        if (vcb) {
            const cs = commentString(vcb);
            ws += `\n${indentComment(cs, ctx.indent)}`;
        }
        if (valueStr === '' && !ctx.inFlow) {
            if (ws === '\n')
                ws = '\n\n';
        }
        else {
            ws += `\n${ctx.indent}`;
        }
    }
    else if (!explicitKey && isCollection(value)) {
        const vs0 = valueStr[0];
        const nl0 = valueStr.indexOf('\n');
        const hasNewline = nl0 !== -1;
        const flow = ctx.inFlow ?? value.flow ?? value.items.length === 0;
        if (hasNewline || !flow) {
            let hasPropsLine = false;
            if (hasNewline && (vs0 === '&' || vs0 === '!')) {
                let sp0 = valueStr.indexOf(' ');
                if (vs0 === '&' &&
                    sp0 !== -1 &&
                    sp0 < nl0 &&
                    valueStr[sp0 + 1] === '!') {
                    sp0 = valueStr.indexOf(' ', sp0 + 1);
                }
                if (sp0 === -1 || nl0 < sp0)
                    hasPropsLine = true;
            }
            if (!hasPropsLine)
                ws = `\n${ctx.indent}`;
        }
    }
    else if (valueStr === '' || valueStr[0] === '\n') {
        ws = '';
    }
    str += ws + valueStr;
    if (ctx.inFlow) {
        if (valueCommentDone && onComment)
            onComment();
    }
    else if (valueComment && !valueCommentDone) {
        str += lineComment(str, ctx.indent, commentString(valueComment));
    }
    else if (chompKeep && onChompKeep) {
        onChompKeep();
    }
    return str;
}

export { stringifyPair };

319
node_modules/yaml/browser/dist/stringify/stringifyString.js
generated
vendored
Normal file
@@ -0,0 +1,319 @@
import { Scalar } from '../nodes/Scalar.js';
import { foldFlowLines, FOLD_QUOTED, FOLD_FLOW, FOLD_BLOCK } from './foldFlowLines.js';

const getFoldOptions = (ctx) => ({
    indentAtStart: ctx.indentAtStart,
    lineWidth: ctx.options.lineWidth,
    minContentWidth: ctx.options.minContentWidth
});
// Also checks for lines starting with %, as parsing the output as YAML 1.1 will
// presume that's starting a new document.
const containsDocumentMarker = (str) => /^(%|---|\.\.\.)/m.test(str);
function lineLengthOverLimit(str, lineWidth, indentLength) {
    if (!lineWidth || lineWidth < 0)
        return false;
    const limit = lineWidth - indentLength;
    const strLen = str.length;
    if (strLen <= limit)
        return false;
    for (let i = 0, start = 0; i < strLen; ++i) {
        if (str[i] === '\n') {
            if (i - start > limit)
                return true;
            start = i + 1;
            if (strLen - start <= limit)
                return false;
        }
    }
    return true;
}
function doubleQuotedString(value, ctx) {
    const json = JSON.stringify(value);
    if (ctx.options.doubleQuotedAsJSON)
        return json;
    const { implicitKey } = ctx;
    const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength;
    const indent = ctx.indent || (containsDocumentMarker(value) ? '  ' : '');
    let str = '';
    let start = 0;
    for (let i = 0, ch = json[i]; ch; ch = json[++i]) {
        if (ch === ' ' && json[i + 1] === '\\' && json[i + 2] === 'n') {
            // space before newline needs to be escaped to not be folded
            str += json.slice(start, i) + '\\ ';
            i += 1;
            start = i;
            ch = '\\';
        }
        if (ch === '\\')
            switch (json[i + 1]) {
                case 'u':
                    {
                        str += json.slice(start, i);
                        const code = json.substr(i + 2, 4);
                        switch (code) {
                            case '0000':
                                str += '\\0';
                                break;
                            case '0007':
                                str += '\\a';
                                break;
                            case '000b':
                                str += '\\v';
                                break;
                            case '001b':
                                str += '\\e';
                                break;
                            case '0085':
                                str += '\\N';
                                break;
                            case '00a0':
                                str += '\\_';
                                break;
                            case '2028':
                                str += '\\L';
                                break;
                            case '2029':
                                str += '\\P';
                                break;
                            default:
                                if (code.substr(0, 2) === '00')
                                    str += '\\x' + code.substr(2);
                                else
                                    str += json.substr(i, 6);
                        }
                        i += 5;
                        start = i + 1;
                    }
                    break;
                case 'n':
                    if (implicitKey ||
                        json[i + 2] === '"' ||
                        json.length < minMultiLineLength) {
                        i += 1;
                    }
                    else {
                        // folding will eat first newline
                        str += json.slice(start, i) + '\n\n';
                        while (json[i + 2] === '\\' &&
                            json[i + 3] === 'n' &&
                            json[i + 4] !== '"') {
                            str += '\n';
                            i += 2;
                        }
                        str += indent;
                        // space after newline needs to be escaped to not be folded
                        if (json[i + 2] === ' ')
                            str += '\\';
                        i += 1;
                        start = i + 1;
                    }
                    break;
                default:
                    i += 1;
            }
    }
    str = start ? str + json.slice(start) : json;
    return implicitKey
        ? str
        : foldFlowLines(str, indent, FOLD_QUOTED, getFoldOptions(ctx));
}
function singleQuotedString(value, ctx) {
    if (ctx.options.singleQuote === false ||
        (ctx.implicitKey && value.includes('\n')) ||
        /[ \t]\n|\n[ \t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline
    )
        return doubleQuotedString(value, ctx);
    const indent = ctx.indent || (containsDocumentMarker(value) ? '  ' : '');
    const res = "'" + value.replace(/'/g, "''").replace(/\n+/g, `$&\n${indent}`) + "'";
    return ctx.implicitKey
        ? res
        : foldFlowLines(res, indent, FOLD_FLOW, getFoldOptions(ctx));
}
function quotedString(value, ctx) {
    const { singleQuote } = ctx.options;
    let qs;
    if (singleQuote === false)
        qs = doubleQuotedString;
    else {
        const hasDouble = value.includes('"');
        const hasSingle = value.includes("'");
        if (hasDouble && !hasSingle)
            qs = singleQuotedString;
        else if (hasSingle && !hasDouble)
            qs = doubleQuotedString;
        else
            qs = singleQuote ? singleQuotedString : doubleQuotedString;
    }
    return qs(value, ctx);
}
function blockString({ comment, type, value }, ctx, onComment, onChompKeep) {
    const { blockQuote, commentString, lineWidth } = ctx.options;
    // 1. Block can't end in whitespace unless the last line is non-empty.
    // 2. Strings consisting of only whitespace are best rendered explicitly.
    if (!blockQuote || /\n[\t ]+$/.test(value) || /^\s*$/.test(value)) {
        return quotedString(value, ctx);
    }
    const indent = ctx.indent ||
        (ctx.forceBlockIndent || containsDocumentMarker(value) ? '  ' : '');
    const literal = blockQuote === 'literal'
        ? true
        : blockQuote === 'folded' || type === Scalar.BLOCK_FOLDED
            ? false
            : type === Scalar.BLOCK_LITERAL
                ? true
                : !lineLengthOverLimit(value, lineWidth, indent.length);
    if (!value)
        return literal ? '|\n' : '>\n';
    // determine chomping from whitespace at value end
    let chomp;
    let endStart;
    for (endStart = value.length; endStart > 0; --endStart) {
        const ch = value[endStart - 1];
        if (ch !== '\n' && ch !== '\t' && ch !== ' ')
            break;
    }
    let end = value.substring(endStart);
    const endNlPos = end.indexOf('\n');
    if (endNlPos === -1) {
        chomp = '-'; // strip
    }
    else if (value === end || endNlPos !== end.length - 1) {
        chomp = '+'; // keep
        if (onChompKeep)
            onChompKeep();
    }
    else {
        chomp = ''; // clip
    }
    if (end) {
        value = value.slice(0, -end.length);
        if (end[end.length - 1] === '\n')
            end = end.slice(0, -1);
        end = end.replace(/\n+(?!\n|$)/g, `$&${indent}`);
    }
    // determine indent indicator from whitespace at value start
    let startWithSpace = false;
    let startEnd;
    let startNlPos = -1;
    for (startEnd = 0; startEnd < value.length; ++startEnd) {
        const ch = value[startEnd];
        if (ch === ' ')
            startWithSpace = true;
        else if (ch === '\n')
            startNlPos = startEnd;
        else
            break;
    }
    let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd);
    if (start) {
        value = value.substring(start.length);
        start = start.replace(/\n+/g, `$&${indent}`);
    }
    const indentSize = indent ? '2' : '1'; // root is at -1
    let header = (literal ? '|' : '>') + (startWithSpace ? indentSize : '') + chomp;
    if (comment) {
        header += ' ' + commentString(comment.replace(/ ?[\r\n]+/g, ' '));
        if (onComment)
            onComment();
    }
    if (literal) {
        value = value.replace(/\n+/g, `$&${indent}`);
        return `${header}\n${indent}${start}${value}${end}`;
    }
    value = value
        .replace(/\n+/g, '\n$&')
        .replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded
        //         ^ more-ind.      ^ empty                     ^ capture next empty lines only at end of indent
        .replace(/\n+/g, `$&${indent}`);
    const body = foldFlowLines(`${start}${value}${end}`, indent, FOLD_BLOCK, getFoldOptions(ctx));
    return `${header}\n${indent}${body}`;
}
function plainString(item, ctx, onComment, onChompKeep) {
    const { type, value } = item;
    const { actualString, implicitKey, indent, indentStep, inFlow } = ctx;
    if ((implicitKey && /[\n[\]{},]/.test(value)) ||
        (inFlow && /[[\]{},]/.test(value))) {
        return quotedString(value, ctx);
    }
    if (!value ||
        /^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) {
        // not allowed:
        // - empty string, '-' or '?'
        // - start with an indicator character (except [?:-]) or /[?-] /
        // - '\n ', ': ' or ' \n' anywhere
        // - '#' not preceded by a non-space char
        // - end with ' ' or ':'
        return implicitKey || inFlow || !value.includes('\n')
            ? quotedString(value, ctx)
            : blockString(item, ctx, onComment, onChompKeep);
    }
    if (!implicitKey &&
        !inFlow &&
        type !== Scalar.PLAIN &&
        value.includes('\n')) {
        // Where allowed & type not set explicitly, prefer block style for multiline strings
        return blockString(item, ctx, onComment, onChompKeep);
    }
    if (containsDocumentMarker(value)) {
        if (indent === '') {
            ctx.forceBlockIndent = true;
            return blockString(item, ctx, onComment, onChompKeep);
        }
        else if (implicitKey && indent === indentStep) {
            return quotedString(value, ctx);
        }
    }
    const str = value.replace(/\n+/g, `$&\n${indent}`);
    // Verify that output will be parsed as a string, as e.g. plain numbers and
    // booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'),
    // and others in v1.1.
    if (actualString) {
        const test = (tag) => tag.default && tag.tag !== 'tag:yaml.org,2002:str' && tag.test?.test(str);
        const { compat, tags } = ctx.doc.schema;
        if (tags.some(test) || compat?.some(test))
            return quotedString(value, ctx);
    }
    return implicitKey
        ? str
        : foldFlowLines(str, indent, FOLD_FLOW, getFoldOptions(ctx));
}
function stringifyString(item, ctx, onComment, onChompKeep) {
    const { implicitKey, inFlow } = ctx;
    const ss = typeof item.value === 'string'
        ? item
        : Object.assign({}, item, { value: String(item.value) });
    let { type } = item;
    if (type !== Scalar.QUOTE_DOUBLE) {
        // force double quotes on control characters & unpaired surrogates
        if (/[\x00-\x08\x0b-\x1f\x7f-\x9f\u{D800}-\u{DFFF}]/u.test(ss.value))
            type = Scalar.QUOTE_DOUBLE;
    }
    const _stringify = (_type) => {
        switch (_type) {
            case Scalar.BLOCK_FOLDED:
            case Scalar.BLOCK_LITERAL:
                return implicitKey || inFlow
                    ? quotedString(ss.value, ctx) // blocks are not valid inside flow containers
                    : blockString(ss, ctx, onComment, onChompKeep);
            case Scalar.QUOTE_DOUBLE:
                return doubleQuotedString(ss.value, ctx);
            case Scalar.QUOTE_SINGLE:
                return singleQuotedString(ss.value, ctx);
            case Scalar.PLAIN:
                return plainString(ss, ctx, onComment, onChompKeep);
            default:
                return null;
        }
    };
    let res = _stringify(type);
    if (res === null) {
        const { defaultKeyType, defaultStringType } = ctx.options;
        const t = (implicitKey && defaultKeyType) || defaultStringType;
        res = _stringify(t);
        if (res === null)
            throw new Error(`Unsupported default string type ${t}`);
    }
    return res;
}

export { stringifyString };
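
The style chosen above is what the public stringify options steer; a rough sketch (exact folding and quoting may differ slightly depending on options):

import { stringify } from 'yaml';

stringify({ msg: 'line one\nline two\n' });
// msg: |
//   line one
//   line two
stringify({ msg: 'line one\nline two\n' }, { blockQuote: false });
// msg: "line one\nline two\n"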

9
node_modules/yaml/browser/dist/util.js
generated
vendored
Normal file
@@ -0,0 +1,9 @@
export { debug, warn } from './log.js';
export { findPair } from './nodes/YAMLMap.js';
export { toJS } from './nodes/toJS.js';
export { map as mapTag } from './schema/common/map.js';
export { seq as seqTag } from './schema/common/seq.js';
export { string as stringTag } from './schema/common/string.js';
export { foldFlowLines } from './stringify/foldFlowLines.js';
export { stringifyNumber } from './stringify/stringifyNumber.js';
export { stringifyString } from './stringify/stringifyString.js';
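
In the published package these re-exports back the 'yaml/util' entry point (an assumption based on the paths above), e.g.:

import { foldFlowLines, stringifyNumber, stringifyString } from 'yaml/util';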

233
node_modules/yaml/browser/dist/visit.js
generated
vendored
Normal file
@@ -0,0 +1,233 @@
import { isDocument, isNode, isPair, isCollection, isMap, isSeq, isScalar, isAlias } from './nodes/Node.js';

const BREAK = Symbol('break visit');
const SKIP = Symbol('skip children');
const REMOVE = Symbol('remove node');
/**
 * Apply a visitor to an AST node or document.
 *
 * Walks through the tree (depth-first) starting from `node`, calling a
 * `visitor` function with three arguments:
 *   - `key`: For sequence values and map `Pair`, the node's index in the
 *     collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.
 *     `null` for the root node.
 *   - `node`: The current node.
 *   - `path`: The ancestry of the current node.
 *
 * The return value of the visitor may be used to control the traversal:
 *   - `undefined` (default): Do nothing and continue
 *   - `visit.SKIP`: Do not visit the children of this node, continue with next
 *     sibling
 *   - `visit.BREAK`: Terminate traversal completely
 *   - `visit.REMOVE`: Remove the current node, then continue with the next one
 *   - `Node`: Replace the current node, then continue by visiting it
 *   - `number`: While iterating the items of a sequence or map, set the index
 *     of the next step. This is useful especially if the index of the current
 *     node has changed.
 *
 * If `visitor` is a single function, it will be called with all values
 * encountered in the tree, including e.g. `null` values. Alternatively,
 * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,
 * `Alias` and `Scalar` node. To define the same visitor function for more than
 * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)
 * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most
 * specific defined one will be used for each node.
 */
function visit(node, visitor) {
    const visitor_ = initVisitor(visitor);
    if (isDocument(node)) {
        const cd = visit_(null, node.contents, visitor_, Object.freeze([node]));
        if (cd === REMOVE)
            node.contents = null;
    }
    else
        visit_(null, node, visitor_, Object.freeze([]));
}
// Without the `as symbol` casts, TS declares these in the `visit`
// namespace using `var`, but then complains about that because
// `unique symbol` must be `const`.
/** Terminate visit traversal completely */
visit.BREAK = BREAK;
/** Do not visit the children of the current node */
visit.SKIP = SKIP;
/** Remove the current node */
visit.REMOVE = REMOVE;
function visit_(key, node, visitor, path) {
    const ctrl = callVisitor(key, node, visitor, path);
    if (isNode(ctrl) || isPair(ctrl)) {
        replaceNode(key, path, ctrl);
        return visit_(key, ctrl, visitor, path);
    }
    if (typeof ctrl !== 'symbol') {
        if (isCollection(node)) {
            path = Object.freeze(path.concat(node));
            for (let i = 0; i < node.items.length; ++i) {
                const ci = visit_(i, node.items[i], visitor, path);
                if (typeof ci === 'number')
                    i = ci - 1;
                else if (ci === BREAK)
                    return BREAK;
                else if (ci === REMOVE) {
                    node.items.splice(i, 1);
                    i -= 1;
                }
            }
        }
        else if (isPair(node)) {
            path = Object.freeze(path.concat(node));
            const ck = visit_('key', node.key, visitor, path);
            if (ck === BREAK)
                return BREAK;
            else if (ck === REMOVE)
                node.key = null;
            const cv = visit_('value', node.value, visitor, path);
            if (cv === BREAK)
                return BREAK;
            else if (cv === REMOVE)
                node.value = null;
        }
    }
    return ctrl;
}
/**
 * Apply an async visitor to an AST node or document.
 *
 * Walks through the tree (depth-first) starting from `node`, calling a
 * `visitor` function with three arguments:
 *   - `key`: For sequence values and map `Pair`, the node's index in the
 *     collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.
 *     `null` for the root node.
 *   - `node`: The current node.
 *   - `path`: The ancestry of the current node.
 *
 * The return value of the visitor may be used to control the traversal:
 *   - `Promise`: Must resolve to one of the following values
 *   - `undefined` (default): Do nothing and continue
 *   - `visit.SKIP`: Do not visit the children of this node, continue with next
 *     sibling
 *   - `visit.BREAK`: Terminate traversal completely
 *   - `visit.REMOVE`: Remove the current node, then continue with the next one
 *   - `Node`: Replace the current node, then continue by visiting it
 *   - `number`: While iterating the items of a sequence or map, set the index
 *     of the next step. This is useful especially if the index of the current
 *     node has changed.
 *
 * If `visitor` is a single function, it will be called with all values
 * encountered in the tree, including e.g. `null` values. Alternatively,
 * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,
 * `Alias` and `Scalar` node. To define the same visitor function for more than
 * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)
 * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most
 * specific defined one will be used for each node.
 */
async function visitAsync(node, visitor) {
    const visitor_ = initVisitor(visitor);
    if (isDocument(node)) {
        const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node]));
        if (cd === REMOVE)
            node.contents = null;
    }
    else
        await visitAsync_(null, node, visitor_, Object.freeze([]));
}
// Without the `as symbol` casts, TS declares these in the `visit`
// namespace using `var`, but then complains about that because
// `unique symbol` must be `const`.
/** Terminate visit traversal completely */
visitAsync.BREAK = BREAK;
/** Do not visit the children of the current node */
visitAsync.SKIP = SKIP;
/** Remove the current node */
visitAsync.REMOVE = REMOVE;
async function visitAsync_(key, node, visitor, path) {
    const ctrl = await callVisitor(key, node, visitor, path);
    if (isNode(ctrl) || isPair(ctrl)) {
        replaceNode(key, path, ctrl);
        return visitAsync_(key, ctrl, visitor, path);
    }
    if (typeof ctrl !== 'symbol') {
        if (isCollection(node)) {
            path = Object.freeze(path.concat(node));
            for (let i = 0; i < node.items.length; ++i) {
                const ci = await visitAsync_(i, node.items[i], visitor, path);
                if (typeof ci === 'number')
                    i = ci - 1;
                else if (ci === BREAK)
                    return BREAK;
                else if (ci === REMOVE) {
                    node.items.splice(i, 1);
                    i -= 1;
                }
            }
        }
        else if (isPair(node)) {
            path = Object.freeze(path.concat(node));
            const ck = await visitAsync_('key', node.key, visitor, path);
            if (ck === BREAK)
                return BREAK;
            else if (ck === REMOVE)
                node.key = null;
            const cv = await visitAsync_('value', node.value, visitor, path);
            if (cv === BREAK)
                return BREAK;
            else if (cv === REMOVE)
                node.value = null;
        }
    }
    return ctrl;
}
function initVisitor(visitor) {
    if (typeof visitor === 'object' &&
        (visitor.Collection || visitor.Node || visitor.Value)) {
        return Object.assign({
            Alias: visitor.Node,
            Map: visitor.Node,
            Scalar: visitor.Node,
            Seq: visitor.Node
        }, visitor.Value && {
            Map: visitor.Value,
            Scalar: visitor.Value,
            Seq: visitor.Value
        }, visitor.Collection && {
            Map: visitor.Collection,
            Seq: visitor.Collection
        }, visitor);
    }
    return visitor;
}
function callVisitor(key, node, visitor, path) {
    if (typeof visitor === 'function')
        return visitor(key, node, path);
    if (isMap(node))
        return visitor.Map?.(key, node, path);
    if (isSeq(node))
        return visitor.Seq?.(key, node, path);
    if (isPair(node))
        return visitor.Pair?.(key, node, path);
    if (isScalar(node))
        return visitor.Scalar?.(key, node, path);
    if (isAlias(node))
        return visitor.Alias?.(key, node, path);
    return undefined;
}
function replaceNode(key, path, node) {
    const parent = path[path.length - 1];
    if (isCollection(parent)) {
        parent.items[key] = node;
    }
    else if (isPair(parent)) {
        if (key === 'key')
            parent.key = node;
        else
            parent.value = node;
    }
    else if (isDocument(parent)) {
        parent.contents = node;
    }
    else {
        const pt = isAlias(parent) ? 'alias' : 'scalar';
        throw new Error(`Cannot replace node with ${pt} parent`);
    }
}

export { visit, visitAsync };
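
A short example of the visitor API documented above (the document content is made up):

import { parseDocument, visit } from 'yaml';

const doc = parseDocument('a: 1\nb: [2, 3]\n');
visit(doc, {
    // multiply every numeric scalar by ten; returning undefined continues the walk
    Scalar(key, node) {
        if (typeof node.value === 'number') node.value *= 10;
    },
    // do not descend into sequences, so the items of `b` are left alone
    Seq: () => visit.SKIP
});
String(doc); // 'a: 10\nb: [ 2, 3 ]\n'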