mirror of https://github.com/deployphp/action.git, synced 2025-04-19 18:46:46 +00:00

Add node_modules

parent e1f786311a
commit 554eb0b122
994 changed files with 195567 additions and 0 deletions
214 node_modules/yaml/browser/dist/parse/cst-scalar.js (generated, vendored, normal file)
@@ -0,0 +1,214 @@
import { resolveBlockScalar } from '../compose/resolve-block-scalar.js';
|
||||
import { resolveFlowScalar } from '../compose/resolve-flow-scalar.js';
|
||||
import { YAMLParseError } from '../errors.js';
|
||||
import { stringifyString } from '../stringify/stringifyString.js';
|
||||
|
||||
function resolveAsScalar(token, strict = true, onError) {
|
||||
if (token) {
|
||||
const _onError = (pos, code, message) => {
|
||||
const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? pos[0] : pos.offset;
|
||||
if (onError)
|
||||
onError(offset, code, message);
|
||||
else
|
||||
throw new YAMLParseError([offset, offset + 1], code, message);
|
||||
};
|
||||
switch (token.type) {
|
||||
case 'scalar':
|
||||
case 'single-quoted-scalar':
|
||||
case 'double-quoted-scalar':
|
||||
return resolveFlowScalar(token, strict, _onError);
|
||||
case 'block-scalar':
|
||||
return resolveBlockScalar(token, strict, _onError);
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
/**
|
||||
* Create a new scalar token with `value`
|
||||
*
|
||||
* Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,
|
||||
* as this function does not support any schema operations and won't check for such conflicts.
|
||||
*
|
||||
* @param value The string representation of the value, which will have its content properly indented.
|
||||
* @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added.
|
||||
* @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.
|
||||
* @param context.indent The indent level of the token.
|
||||
* @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value.
|
||||
* @param context.offset The offset position of the token.
|
||||
* @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.
|
||||
*/
|
||||
function createScalarToken(value, context) {
|
||||
const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context;
|
||||
const source = stringifyString({ type, value }, {
|
||||
implicitKey,
|
||||
indent: indent > 0 ? ' '.repeat(indent) : '',
|
||||
inFlow,
|
||||
options: { blockQuote: true, lineWidth: -1 }
|
||||
});
|
||||
const end = context.end ?? [
|
||||
{ type: 'newline', offset: -1, indent, source: '\n' }
|
||||
];
|
||||
switch (source[0]) {
|
||||
case '|':
|
||||
case '>': {
|
||||
const he = source.indexOf('\n');
|
||||
const head = source.substring(0, he);
|
||||
const body = source.substring(he + 1) + '\n';
|
||||
const props = [
|
||||
{ type: 'block-scalar-header', offset, indent, source: head }
|
||||
];
|
||||
if (!addEndtoBlockProps(props, end))
|
||||
props.push({ type: 'newline', offset: -1, indent, source: '\n' });
|
||||
return { type: 'block-scalar', offset, indent, props, source: body };
|
||||
}
|
||||
case '"':
|
||||
return { type: 'double-quoted-scalar', offset, indent, source, end };
|
||||
case "'":
|
||||
return { type: 'single-quoted-scalar', offset, indent, source, end };
|
||||
default:
|
||||
return { type: 'scalar', offset, indent, source, end };
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have.
|
||||
*
|
||||
* Best efforts are made to retain any comments previously associated with the `token`,
|
||||
* though all contents within a collection's `items` will be overwritten.
|
||||
*
|
||||
* Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,
|
||||
* as this function does not support any schema operations and won't check for such conflicts.
|
||||
*
|
||||
* @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key.
|
||||
* @param value The string representation of the value, which will have its content properly indented.
|
||||
* @param context.afterKey In most cases, values after a key should have an additional level of indentation.
|
||||
* @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.
|
||||
* @param context.inFlow Being within a flow collection may affect the resolved type of the token's value.
|
||||
* @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.
|
||||
*/
|
||||
function setScalarValue(token, value, context = {}) {
|
||||
let { afterKey = false, implicitKey = false, inFlow = false, type } = context;
|
||||
let indent = 'indent' in token ? token.indent : null;
|
||||
if (afterKey && typeof indent === 'number')
|
||||
indent += 2;
|
||||
if (!type)
|
||||
switch (token.type) {
|
||||
case 'single-quoted-scalar':
|
||||
type = 'QUOTE_SINGLE';
|
||||
break;
|
||||
case 'double-quoted-scalar':
|
||||
type = 'QUOTE_DOUBLE';
|
||||
break;
|
||||
case 'block-scalar': {
|
||||
const header = token.props[0];
|
||||
if (header.type !== 'block-scalar-header')
|
||||
throw new Error('Invalid block scalar header');
|
||||
type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL';
|
||||
break;
|
||||
}
|
||||
default:
|
||||
type = 'PLAIN';
|
||||
}
|
||||
const source = stringifyString({ type, value }, {
|
||||
implicitKey: implicitKey || indent === null,
|
||||
indent: indent !== null && indent > 0 ? ' '.repeat(indent) : '',
|
||||
inFlow,
|
||||
options: { blockQuote: true, lineWidth: -1 }
|
||||
});
|
||||
switch (source[0]) {
|
||||
case '|':
|
||||
case '>':
|
||||
setBlockScalarValue(token, source);
|
||||
break;
|
||||
case '"':
|
||||
setFlowScalarValue(token, source, 'double-quoted-scalar');
|
||||
break;
|
||||
case "'":
|
||||
setFlowScalarValue(token, source, 'single-quoted-scalar');
|
||||
break;
|
||||
default:
|
||||
setFlowScalarValue(token, source, 'scalar');
|
||||
}
|
||||
}
|
||||
function setBlockScalarValue(token, source) {
|
||||
const he = source.indexOf('\n');
|
||||
const head = source.substring(0, he);
|
||||
const body = source.substring(he + 1) + '\n';
|
||||
if (token.type === 'block-scalar') {
|
||||
const header = token.props[0];
|
||||
if (header.type !== 'block-scalar-header')
|
||||
throw new Error('Invalid block scalar header');
|
||||
header.source = head;
|
||||
token.source = body;
|
||||
}
|
||||
else {
|
||||
const { offset } = token;
|
||||
const indent = 'indent' in token ? token.indent : -1;
|
||||
const props = [
|
||||
{ type: 'block-scalar-header', offset, indent, source: head }
|
||||
];
|
||||
if (!addEndtoBlockProps(props, 'end' in token ? token.end : undefined))
|
||||
props.push({ type: 'newline', offset: -1, indent, source: '\n' });
|
||||
for (const key of Object.keys(token))
|
||||
if (key !== 'type' && key !== 'offset')
|
||||
delete token[key];
|
||||
Object.assign(token, { type: 'block-scalar', indent, props, source: body });
|
||||
}
|
||||
}
|
||||
/** @returns `true` if last token is a newline */
|
||||
function addEndtoBlockProps(props, end) {
|
||||
if (end)
|
||||
for (const st of end)
|
||||
switch (st.type) {
|
||||
case 'space':
|
||||
case 'comment':
|
||||
props.push(st);
|
||||
break;
|
||||
case 'newline':
|
||||
props.push(st);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
function setFlowScalarValue(token, source, type) {
|
||||
switch (token.type) {
|
||||
case 'scalar':
|
||||
case 'double-quoted-scalar':
|
||||
case 'single-quoted-scalar':
|
||||
token.type = type;
|
||||
token.source = source;
|
||||
break;
|
||||
case 'block-scalar': {
|
||||
const end = token.props.slice(1);
|
||||
let oa = source.length;
|
||||
if (token.props[0].type === 'block-scalar-header')
|
||||
oa -= token.props[0].source.length;
|
||||
for (const tok of end)
|
||||
tok.offset += oa;
|
||||
delete token.props;
|
||||
Object.assign(token, { type, source, end });
|
||||
break;
|
||||
}
|
||||
case 'block-map':
|
||||
case 'block-seq': {
|
||||
const offset = token.offset + source.length;
|
||||
const nl = { type: 'newline', offset, indent: token.indent, source: '\n' };
|
||||
delete token.items;
|
||||
Object.assign(token, { type, source, end: [nl] });
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
const indent = 'indent' in token ? token.indent : -1;
|
||||
const end = 'end' in token && Array.isArray(token.end)
|
||||
? token.end.filter(st => st.type === 'space' ||
|
||||
st.type === 'comment' ||
|
||||
st.type === 'newline')
|
||||
: [];
|
||||
for (const key of Object.keys(token))
|
||||
if (key !== 'type' && key !== 'offset')
|
||||
delete token[key];
|
||||
Object.assign(token, { type, indent, source, end });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export { createScalarToken, resolveAsScalar, setScalarValue };
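The three exports above can be exercised directly on CST scalar tokens. A minimal usage sketch, with illustrative values and a relative import path assumed from this directory:

import { createScalarToken, resolveAsScalar, setScalarValue } from './cst-scalar.js';

// Create a plain scalar token at offset 0 with a two-space indent.
const token = createScalarToken('hello world', { indent: 2, offset: 0 });

// Read it back; errors are reported through the optional callback instead of being thrown.
const resolved = resolveAsScalar(token, true, (offset, code, message) =>
    console.error(code, message, 'at offset', offset));
console.log(resolved?.value); // 'hello world'

// Overwrite the value, forcing double-quoted style; token.type becomes 'double-quoted-scalar'.
setScalarValue(token, 'updated: value', { type: 'QUOTE_DOUBLE' });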
61 node_modules/yaml/browser/dist/parse/cst-stringify.js (generated, vendored, normal file)
@@ -0,0 +1,61 @@
/**
 * Stringify a CST document, token, or collection item
 *
 * Fair warning: This applies no validation whatsoever, and
 * simply concatenates the sources in their logical order.
 */
const stringify = (cst) => 'type' in cst ? stringifyToken(cst) : stringifyItem(cst);
function stringifyToken(token) {
    switch (token.type) {
        case 'block-scalar': {
            let res = '';
            for (const tok of token.props)
                res += stringifyToken(tok);
            return res + token.source;
        }
        case 'block-map':
        case 'block-seq': {
            let res = '';
            for (const item of token.items)
                res += stringifyItem(item);
            return res;
        }
        case 'flow-collection': {
            let res = token.start.source;
            for (const item of token.items)
                res += stringifyItem(item);
            for (const st of token.end)
                res += st.source;
            return res;
        }
        case 'document': {
            let res = stringifyItem(token);
            if (token.end)
                for (const st of token.end)
                    res += st.source;
            return res;
        }
        default: {
            let res = token.source;
            if ('end' in token && token.end)
                for (const st of token.end)
                    res += st.source;
            return res;
        }
    }
}
function stringifyItem({ start, key, sep, value }) {
    let res = '';
    for (const st of start)
        res += st.source;
    if (key)
        res += stringifyToken(key);
    if (sep)
        for (const st of sep)
            res += st.source;
    if (value)
        res += stringifyToken(value);
    return res;
}

export { stringify };
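Because `stringify` only concatenates token sources, feeding it the tokens produced by the parser reproduces the input text. A rough round-trip sketch; the sample document and relative imports are assumptions for illustration:

import { Parser } from './parser.js';
import { stringify } from './cst-stringify.js';

const src = 'key: value\nlist:\n  - a\n  - b\n';
let out = '';
for (const token of new Parser().parse(src))
    out += stringify(token);
console.log(out === src); // true: the CST preserves every source character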
97 node_modules/yaml/browser/dist/parse/cst-visit.js (generated, vendored, normal file)
@@ -0,0 +1,97 @@
const BREAK = Symbol('break visit');
|
||||
const SKIP = Symbol('skip children');
|
||||
const REMOVE = Symbol('remove item');
|
||||
/**
|
||||
* Apply a visitor to a CST document or item.
|
||||
*
|
||||
* Walks through the tree (depth-first) starting from the root, calling a
|
||||
* `visitor` function with two arguments when entering each item:
|
||||
* - `item`: The current item, which includes the following members:
|
||||
* - `start: SourceToken[]` – Source tokens before the key or value,
|
||||
* possibly including its anchor or tag.
|
||||
* - `key?: Token | null` – Set for pair values. May then be `null`, if
|
||||
* the key before the `:` separator is empty.
|
||||
* - `sep?: SourceToken[]` – Source tokens between the key and the value,
|
||||
* which should include the `:` map value indicator if `value` is set.
|
||||
* - `value?: Token` – The value of a sequence item, or of a map pair.
|
||||
* - `path`: The steps from the root to the current node, as an array of
|
||||
* `['key' | 'value', number]` tuples.
|
||||
*
|
||||
* The return value of the visitor may be used to control the traversal:
|
||||
* - `undefined` (default): Do nothing and continue
|
||||
* - `visit.SKIP`: Do not visit the children of this token, continue with
|
||||
* next sibling
|
||||
* - `visit.BREAK`: Terminate traversal completely
|
||||
* - `visit.REMOVE`: Remove the current item, then continue with the next one
|
||||
* - `number`: Set the index of the next step. This is useful especially if
|
||||
* the index of the current token has changed.
|
||||
* - `function`: Define the next visitor for this item. After the original
|
||||
* visitor is called on item entry, next visitors are called after handling
|
||||
* a non-empty `key` and when exiting the item.
|
||||
*/
|
||||
function visit(cst, visitor) {
|
||||
if ('type' in cst && cst.type === 'document')
|
||||
cst = { start: cst.start, value: cst.value };
|
||||
_visit(Object.freeze([]), cst, visitor);
|
||||
}
|
||||
// Without the `as symbol` casts, TS declares these in the `visit`
|
||||
// namespace using `var`, but then complains about that because
|
||||
// `unique symbol` must be `const`.
|
||||
/** Terminate visit traversal completely */
|
||||
visit.BREAK = BREAK;
|
||||
/** Do not visit the children of the current item */
|
||||
visit.SKIP = SKIP;
|
||||
/** Remove the current item */
|
||||
visit.REMOVE = REMOVE;
|
||||
/** Find the item at `path` from `cst` as the root */
|
||||
visit.itemAtPath = (cst, path) => {
|
||||
let item = cst;
|
||||
for (const [field, index] of path) {
|
||||
const tok = item?.[field];
|
||||
if (tok && 'items' in tok) {
|
||||
item = tok.items[index];
|
||||
}
|
||||
else
|
||||
return undefined;
|
||||
}
|
||||
return item;
|
||||
};
|
||||
/**
|
||||
* Get the immediate parent collection of the item at `path` from `cst` as the root.
|
||||
*
|
||||
* Throws an error if the collection is not found, which should never happen if the item itself exists.
|
||||
*/
|
||||
visit.parentCollection = (cst, path) => {
|
||||
const parent = visit.itemAtPath(cst, path.slice(0, -1));
|
||||
const field = path[path.length - 1][0];
|
||||
const coll = parent?.[field];
|
||||
if (coll && 'items' in coll)
|
||||
return coll;
|
||||
throw new Error('Parent collection not found');
|
||||
};
|
||||
function _visit(path, item, visitor) {
|
||||
let ctrl = visitor(item, path);
|
||||
if (typeof ctrl === 'symbol')
|
||||
return ctrl;
|
||||
for (const field of ['key', 'value']) {
|
||||
const token = item[field];
|
||||
if (token && 'items' in token) {
|
||||
for (let i = 0; i < token.items.length; ++i) {
|
||||
const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor);
|
||||
if (typeof ci === 'number')
|
||||
i = ci - 1;
|
||||
else if (ci === BREAK)
|
||||
return BREAK;
|
||||
else if (ci === REMOVE) {
|
||||
token.items.splice(i, 1);
|
||||
i -= 1;
|
||||
}
|
||||
}
|
||||
if (typeof ctrl === 'function' && field === 'key')
|
||||
ctrl = ctrl(item, path);
|
||||
}
|
||||
}
|
||||
return typeof ctrl === 'function' ? ctrl(item, path) : ctrl;
|
||||
}
|
||||
|
||||
export { visit };
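A short sketch of the visitor in use, resolving each pair's key and removing the pair whose key reads 'drop'; the sample document and key names are assumptions for illustration:

import { Parser } from './parser.js';
import { visit } from './cst-visit.js';
import { resolveAsScalar } from './cst-scalar.js';

const [doc] = new Parser().parse('keep: 1\ndrop: 2\n');
visit(doc, (item, path) => {
    if (!item.key)
        return;
    const key = resolveAsScalar(item.key);
    console.log(path, key?.value);
    if (key?.value === 'drop')
        return visit.REMOVE; // splices this pair out of its parent collection
});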
98 node_modules/yaml/browser/dist/parse/cst.js (generated, vendored, normal file)
@@ -0,0 +1,98 @@
export { createScalarToken, resolveAsScalar, setScalarValue } from './cst-scalar.js';
|
||||
export { stringify } from './cst-stringify.js';
|
||||
export { visit } from './cst-visit.js';
|
||||
|
||||
/** The byte order mark */
|
||||
const BOM = '\u{FEFF}';
|
||||
/** Start of doc-mode */
|
||||
const DOCUMENT = '\x02'; // C0: Start of Text
|
||||
/** Unexpected end of flow-mode */
|
||||
const FLOW_END = '\x18'; // C0: Cancel
|
||||
/** Next token is a scalar value */
|
||||
const SCALAR = '\x1f'; // C0: Unit Separator
|
||||
/** @returns `true` if `token` is a flow or block collection */
|
||||
const isCollection = (token) => !!token && 'items' in token;
|
||||
/** @returns `true` if `token` is a flow or block scalar; not an alias */
|
||||
const isScalar = (token) => !!token &&
|
||||
(token.type === 'scalar' ||
|
||||
token.type === 'single-quoted-scalar' ||
|
||||
token.type === 'double-quoted-scalar' ||
|
||||
token.type === 'block-scalar');
|
||||
/* istanbul ignore next */
|
||||
/** Get a printable representation of a lexer token */
|
||||
function prettyToken(token) {
|
||||
switch (token) {
|
||||
case BOM:
|
||||
return '<BOM>';
|
||||
case DOCUMENT:
|
||||
return '<DOC>';
|
||||
case FLOW_END:
|
||||
return '<FLOW_END>';
|
||||
case SCALAR:
|
||||
return '<SCALAR>';
|
||||
default:
|
||||
return JSON.stringify(token);
|
||||
}
|
||||
}
|
||||
/** Identify the type of a lexer token. May return `null` for unknown tokens. */
|
||||
function tokenType(source) {
|
||||
switch (source) {
|
||||
case BOM:
|
||||
return 'byte-order-mark';
|
||||
case DOCUMENT:
|
||||
return 'doc-mode';
|
||||
case FLOW_END:
|
||||
return 'flow-error-end';
|
||||
case SCALAR:
|
||||
return 'scalar';
|
||||
case '---':
|
||||
return 'doc-start';
|
||||
case '...':
|
||||
return 'doc-end';
|
||||
case '':
|
||||
case '\n':
|
||||
case '\r\n':
|
||||
return 'newline';
|
||||
case '-':
|
||||
return 'seq-item-ind';
|
||||
case '?':
|
||||
return 'explicit-key-ind';
|
||||
case ':':
|
||||
return 'map-value-ind';
|
||||
case '{':
|
||||
return 'flow-map-start';
|
||||
case '}':
|
||||
return 'flow-map-end';
|
||||
case '[':
|
||||
return 'flow-seq-start';
|
||||
case ']':
|
||||
return 'flow-seq-end';
|
||||
case ',':
|
||||
return 'comma';
|
||||
}
|
||||
switch (source[0]) {
|
||||
case ' ':
|
||||
case '\t':
|
||||
return 'space';
|
||||
case '#':
|
||||
return 'comment';
|
||||
case '%':
|
||||
return 'directive-line';
|
||||
case '*':
|
||||
return 'alias';
|
||||
case '&':
|
||||
return 'anchor';
|
||||
case '!':
|
||||
return 'tag';
|
||||
case "'":
|
||||
return 'single-quoted-scalar';
|
||||
case '"':
|
||||
return 'double-quoted-scalar';
|
||||
case '|':
|
||||
case '>':
|
||||
return 'block-scalar-header';
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
export { BOM, DOCUMENT, FLOW_END, SCALAR, isCollection, isScalar, prettyToken, tokenType };
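These helpers classify the raw lexemes emitted by the lexer. A small sketch (the input string is an illustrative assumption); note that the source of a scalar itself maps to null, because the preceding SCALAR control character already identifies it:

import { Lexer } from './lexer.js';
import { prettyToken, tokenType } from './cst.js';

for (const lexeme of new Lexer().lex('foo: [1, 2]\n'))
    console.log(prettyToken(lexeme), tokenType(lexeme));
// <DOC> doc-mode, <SCALAR> scalar, "foo" null, ":" map-value-ind, " " space,
// "[" flow-seq-start, <SCALAR> scalar, "1" null, "," comma, ... "\n" newline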
701 node_modules/yaml/browser/dist/parse/lexer.js (generated, vendored, normal file)
@@ -0,0 +1,701 @@
import { BOM, DOCUMENT, FLOW_END, SCALAR } from './cst.js';
|
||||
|
||||
/*
|
||||
START -> stream
|
||||
|
||||
stream
|
||||
directive -> line-end -> stream
|
||||
indent + line-end -> stream
|
||||
[else] -> line-start
|
||||
|
||||
line-end
|
||||
comment -> line-end
|
||||
newline -> .
|
||||
input-end -> END
|
||||
|
||||
line-start
|
||||
doc-start -> doc
|
||||
doc-end -> stream
|
||||
[else] -> indent -> block-start
|
||||
|
||||
block-start
|
||||
seq-item-start -> block-start
|
||||
explicit-key-start -> block-start
|
||||
map-value-start -> block-start
|
||||
[else] -> doc
|
||||
|
||||
doc
|
||||
line-end -> line-start
|
||||
spaces -> doc
|
||||
anchor -> doc
|
||||
tag -> doc
|
||||
flow-start -> flow -> doc
|
||||
flow-end -> error -> doc
|
||||
seq-item-start -> error -> doc
|
||||
explicit-key-start -> error -> doc
|
||||
map-value-start -> doc
|
||||
alias -> doc
|
||||
quote-start -> quoted-scalar -> doc
|
||||
block-scalar-header -> line-end -> block-scalar(min) -> line-start
|
||||
[else] -> plain-scalar(false, min) -> doc
|
||||
|
||||
flow
|
||||
line-end -> flow
|
||||
spaces -> flow
|
||||
anchor -> flow
|
||||
tag -> flow
|
||||
flow-start -> flow -> flow
|
||||
flow-end -> .
|
||||
seq-item-start -> error -> flow
|
||||
explicit-key-start -> flow
|
||||
map-value-start -> flow
|
||||
alias -> flow
|
||||
quote-start -> quoted-scalar -> flow
|
||||
comma -> flow
|
||||
[else] -> plain-scalar(true, 0) -> flow
|
||||
|
||||
quoted-scalar
|
||||
quote-end -> .
|
||||
[else] -> quoted-scalar
|
||||
|
||||
block-scalar(min)
|
||||
newline + peek(indent < min) -> .
|
||||
[else] -> block-scalar(min)
|
||||
|
||||
plain-scalar(is-flow, min)
|
||||
scalar-end(is-flow) -> .
|
||||
peek(newline + (indent < min)) -> .
|
||||
[else] -> plain-scalar(min)
|
||||
*/
|
||||
function isEmpty(ch) {
|
||||
switch (ch) {
|
||||
case undefined:
|
||||
case ' ':
|
||||
case '\n':
|
||||
case '\r':
|
||||
case '\t':
|
||||
return true;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
const hexDigits = '0123456789ABCDEFabcdef'.split('');
|
||||
const tagChars = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()".split('');
|
||||
const invalidFlowScalarChars = ',[]{}'.split('');
|
||||
const invalidAnchorChars = ' ,[]{}\n\r\t'.split('');
|
||||
const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.includes(ch);
|
||||
/**
|
||||
* Splits an input string into lexical tokens, i.e. smaller strings that are
|
||||
* easily identifiable by `tokens.tokenType()`.
|
||||
*
|
||||
* Lexing starts always in a "stream" context. Incomplete input may be buffered
|
||||
* until a complete token can be emitted.
|
||||
*
|
||||
* In addition to slices of the original input, the following control characters
|
||||
* may also be emitted:
|
||||
*
|
||||
* - `\x02` (Start of Text): A document starts with the next token
|
||||
* - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error)
|
||||
* - `\x1f` (Unit Separator): Next token is a scalar value
|
||||
* - `\u{FEFF}` (Byte order mark): Emitted separately outside documents
|
||||
*/
|
||||
class Lexer {
|
||||
constructor() {
|
||||
/**
|
||||
* Flag indicating whether the end of the current buffer marks the end of
|
||||
* all input
|
||||
*/
|
||||
this.atEnd = false;
|
||||
/**
|
||||
* Explicit indent set in block scalar header, as an offset from the current
|
||||
* minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not
|
||||
* explicitly set.
|
||||
*/
|
||||
this.blockScalarIndent = -1;
|
||||
/**
|
||||
* Block scalars that include a + (keep) chomping indicator in their header
|
||||
* include trailing empty lines, which are otherwise excluded from the
|
||||
* scalar's contents.
|
||||
*/
|
||||
this.blockScalarKeep = false;
|
||||
/** Current input */
|
||||
this.buffer = '';
|
||||
/**
|
||||
* Flag noting whether the map value indicator : can immediately follow this
|
||||
* node within a flow context.
|
||||
*/
|
||||
this.flowKey = false;
|
||||
/** Count of surrounding flow collection levels. */
|
||||
this.flowLevel = 0;
|
||||
/**
|
||||
* Minimum level of indentation required for next lines to be parsed as a
|
||||
* part of the current scalar value.
|
||||
*/
|
||||
this.indentNext = 0;
|
||||
/** Indentation level of the current line. */
|
||||
this.indentValue = 0;
|
||||
/** Position of the next \n character. */
|
||||
this.lineEndPos = null;
|
||||
/** Stores the state of the lexer if reaching the end of incomplete input */
|
||||
this.next = null;
|
||||
/** A pointer to `buffer`; the current position of the lexer. */
|
||||
this.pos = 0;
|
||||
}
|
||||
/**
|
||||
* Generate YAML tokens from the `source` string. If `incomplete`,
|
||||
* a part of the last line may be left as a buffer for the next call.
|
||||
*
|
||||
* @returns A generator of lexical tokens
|
||||
*/
|
||||
*lex(source, incomplete = false) {
|
||||
if (source) {
|
||||
this.buffer = this.buffer ? this.buffer + source : source;
|
||||
this.lineEndPos = null;
|
||||
}
|
||||
this.atEnd = !incomplete;
|
||||
let next = this.next ?? 'stream';
|
||||
while (next && (incomplete || this.hasChars(1)))
|
||||
next = yield* this.parseNext(next);
|
||||
}
|
||||
atLineEnd() {
|
||||
let i = this.pos;
|
||||
let ch = this.buffer[i];
|
||||
while (ch === ' ' || ch === '\t')
|
||||
ch = this.buffer[++i];
|
||||
if (!ch || ch === '#' || ch === '\n')
|
||||
return true;
|
||||
if (ch === '\r')
|
||||
return this.buffer[i + 1] === '\n';
|
||||
return false;
|
||||
}
|
||||
charAt(n) {
|
||||
return this.buffer[this.pos + n];
|
||||
}
|
||||
continueScalar(offset) {
|
||||
let ch = this.buffer[offset];
|
||||
if (this.indentNext > 0) {
|
||||
let indent = 0;
|
||||
while (ch === ' ')
|
||||
ch = this.buffer[++indent + offset];
|
||||
if (ch === '\r') {
|
||||
const next = this.buffer[indent + offset + 1];
|
||||
if (next === '\n' || (!next && !this.atEnd))
|
||||
return offset + indent + 1;
|
||||
}
|
||||
return ch === '\n' || indent >= this.indentNext || (!ch && !this.atEnd)
|
||||
? offset + indent
|
||||
: -1;
|
||||
}
|
||||
if (ch === '-' || ch === '.') {
|
||||
const dt = this.buffer.substr(offset, 3);
|
||||
if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3]))
|
||||
return -1;
|
||||
}
|
||||
return offset;
|
||||
}
|
||||
getLine() {
|
||||
let end = this.lineEndPos;
|
||||
if (typeof end !== 'number' || (end !== -1 && end < this.pos)) {
|
||||
end = this.buffer.indexOf('\n', this.pos);
|
||||
this.lineEndPos = end;
|
||||
}
|
||||
if (end === -1)
|
||||
return this.atEnd ? this.buffer.substring(this.pos) : null;
|
||||
if (this.buffer[end - 1] === '\r')
|
||||
end -= 1;
|
||||
return this.buffer.substring(this.pos, end);
|
||||
}
|
||||
hasChars(n) {
|
||||
return this.pos + n <= this.buffer.length;
|
||||
}
|
||||
setNext(state) {
|
||||
this.buffer = this.buffer.substring(this.pos);
|
||||
this.pos = 0;
|
||||
this.lineEndPos = null;
|
||||
this.next = state;
|
||||
return null;
|
||||
}
|
||||
peek(n) {
|
||||
return this.buffer.substr(this.pos, n);
|
||||
}
|
||||
*parseNext(next) {
|
||||
switch (next) {
|
||||
case 'stream':
|
||||
return yield* this.parseStream();
|
||||
case 'line-start':
|
||||
return yield* this.parseLineStart();
|
||||
case 'block-start':
|
||||
return yield* this.parseBlockStart();
|
||||
case 'doc':
|
||||
return yield* this.parseDocument();
|
||||
case 'flow':
|
||||
return yield* this.parseFlowCollection();
|
||||
case 'quoted-scalar':
|
||||
return yield* this.parseQuotedScalar();
|
||||
case 'block-scalar':
|
||||
return yield* this.parseBlockScalar();
|
||||
case 'plain-scalar':
|
||||
return yield* this.parsePlainScalar();
|
||||
}
|
||||
}
|
||||
*parseStream() {
|
||||
let line = this.getLine();
|
||||
if (line === null)
|
||||
return this.setNext('stream');
|
||||
if (line[0] === BOM) {
|
||||
yield* this.pushCount(1);
|
||||
line = line.substring(1);
|
||||
}
|
||||
if (line[0] === '%') {
|
||||
let dirEnd = line.length;
|
||||
const cs = line.indexOf('#');
|
||||
if (cs !== -1) {
|
||||
const ch = line[cs - 1];
|
||||
if (ch === ' ' || ch === '\t')
|
||||
dirEnd = cs - 1;
|
||||
}
|
||||
while (true) {
|
||||
const ch = line[dirEnd - 1];
|
||||
if (ch === ' ' || ch === '\t')
|
||||
dirEnd -= 1;
|
||||
else
|
||||
break;
|
||||
}
|
||||
const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true));
|
||||
yield* this.pushCount(line.length - n); // possible comment
|
||||
this.pushNewline();
|
||||
return 'stream';
|
||||
}
|
||||
if (this.atLineEnd()) {
|
||||
const sp = yield* this.pushSpaces(true);
|
||||
yield* this.pushCount(line.length - sp);
|
||||
yield* this.pushNewline();
|
||||
return 'stream';
|
||||
}
|
||||
yield DOCUMENT;
|
||||
return yield* this.parseLineStart();
|
||||
}
|
||||
*parseLineStart() {
|
||||
const ch = this.charAt(0);
|
||||
if (!ch && !this.atEnd)
|
||||
return this.setNext('line-start');
|
||||
if (ch === '-' || ch === '.') {
|
||||
if (!this.atEnd && !this.hasChars(4))
|
||||
return this.setNext('line-start');
|
||||
const s = this.peek(3);
|
||||
if (s === '---' && isEmpty(this.charAt(3))) {
|
||||
yield* this.pushCount(3);
|
||||
this.indentValue = 0;
|
||||
this.indentNext = 0;
|
||||
return 'doc';
|
||||
}
|
||||
else if (s === '...' && isEmpty(this.charAt(3))) {
|
||||
yield* this.pushCount(3);
|
||||
return 'stream';
|
||||
}
|
||||
}
|
||||
this.indentValue = yield* this.pushSpaces(false);
|
||||
if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1)))
|
||||
this.indentNext = this.indentValue;
|
||||
return yield* this.parseBlockStart();
|
||||
}
|
||||
*parseBlockStart() {
|
||||
const [ch0, ch1] = this.peek(2);
|
||||
if (!ch1 && !this.atEnd)
|
||||
return this.setNext('block-start');
|
||||
if ((ch0 === '-' || ch0 === '?' || ch0 === ':') && isEmpty(ch1)) {
|
||||
const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true));
|
||||
this.indentNext = this.indentValue + 1;
|
||||
this.indentValue += n;
|
||||
return yield* this.parseBlockStart();
|
||||
}
|
||||
return 'doc';
|
||||
}
|
||||
*parseDocument() {
|
||||
yield* this.pushSpaces(true);
|
||||
const line = this.getLine();
|
||||
if (line === null)
|
||||
return this.setNext('doc');
|
||||
let n = yield* this.pushIndicators();
|
||||
switch (line[n]) {
|
||||
case '#':
|
||||
yield* this.pushCount(line.length - n);
|
||||
// fallthrough
|
||||
case undefined:
|
||||
yield* this.pushNewline();
|
||||
return yield* this.parseLineStart();
|
||||
case '{':
|
||||
case '[':
|
||||
yield* this.pushCount(1);
|
||||
this.flowKey = false;
|
||||
this.flowLevel = 1;
|
||||
return 'flow';
|
||||
case '}':
|
||||
case ']':
|
||||
// this is an error
|
||||
yield* this.pushCount(1);
|
||||
return 'doc';
|
||||
case '*':
|
||||
yield* this.pushUntil(isNotAnchorChar);
|
||||
return 'doc';
|
||||
case '"':
|
||||
case "'":
|
||||
return yield* this.parseQuotedScalar();
|
||||
case '|':
|
||||
case '>':
|
||||
n += yield* this.parseBlockScalarHeader();
|
||||
n += yield* this.pushSpaces(true);
|
||||
yield* this.pushCount(line.length - n);
|
||||
yield* this.pushNewline();
|
||||
return yield* this.parseBlockScalar();
|
||||
default:
|
||||
return yield* this.parsePlainScalar();
|
||||
}
|
||||
}
|
||||
*parseFlowCollection() {
|
||||
let nl, sp;
|
||||
let indent = -1;
|
||||
do {
|
||||
nl = yield* this.pushNewline();
|
||||
if (nl > 0) {
|
||||
sp = yield* this.pushSpaces(false);
|
||||
this.indentValue = indent = sp;
|
||||
}
|
||||
else {
|
||||
sp = 0;
|
||||
}
|
||||
sp += yield* this.pushSpaces(true);
|
||||
} while (nl + sp > 0);
|
||||
const line = this.getLine();
|
||||
if (line === null)
|
||||
return this.setNext('flow');
|
||||
if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') ||
|
||||
(indent === 0 &&
|
||||
(line.startsWith('---') || line.startsWith('...')) &&
|
||||
isEmpty(line[3]))) {
|
||||
// Allowing for the terminal ] or } at the same (rather than greater)
|
||||
// indent level as the initial [ or { is technically invalid, but
|
||||
// failing here would be surprising to users.
|
||||
const atFlowEndMarker = indent === this.indentNext - 1 &&
|
||||
this.flowLevel === 1 &&
|
||||
(line[0] === ']' || line[0] === '}');
|
||||
if (!atFlowEndMarker) {
|
||||
// this is an error
|
||||
this.flowLevel = 0;
|
||||
yield FLOW_END;
|
||||
return yield* this.parseLineStart();
|
||||
}
|
||||
}
|
||||
let n = 0;
|
||||
while (line[n] === ',') {
|
||||
n += yield* this.pushCount(1);
|
||||
n += yield* this.pushSpaces(true);
|
||||
this.flowKey = false;
|
||||
}
|
||||
n += yield* this.pushIndicators();
|
||||
switch (line[n]) {
|
||||
case undefined:
|
||||
return 'flow';
|
||||
case '#':
|
||||
yield* this.pushCount(line.length - n);
|
||||
return 'flow';
|
||||
case '{':
|
||||
case '[':
|
||||
yield* this.pushCount(1);
|
||||
this.flowKey = false;
|
||||
this.flowLevel += 1;
|
||||
return 'flow';
|
||||
case '}':
|
||||
case ']':
|
||||
yield* this.pushCount(1);
|
||||
this.flowKey = true;
|
||||
this.flowLevel -= 1;
|
||||
return this.flowLevel ? 'flow' : 'doc';
|
||||
case '*':
|
||||
yield* this.pushUntil(isNotAnchorChar);
|
||||
return 'flow';
|
||||
case '"':
|
||||
case "'":
|
||||
this.flowKey = true;
|
||||
return yield* this.parseQuotedScalar();
|
||||
case ':': {
|
||||
const next = this.charAt(1);
|
||||
if (this.flowKey || isEmpty(next) || next === ',') {
|
||||
this.flowKey = false;
|
||||
yield* this.pushCount(1);
|
||||
yield* this.pushSpaces(true);
|
||||
return 'flow';
|
||||
}
|
||||
}
|
||||
// fallthrough
|
||||
default:
|
||||
this.flowKey = false;
|
||||
return yield* this.parsePlainScalar();
|
||||
}
|
||||
}
|
||||
*parseQuotedScalar() {
|
||||
const quote = this.charAt(0);
|
||||
let end = this.buffer.indexOf(quote, this.pos + 1);
|
||||
if (quote === "'") {
|
||||
while (end !== -1 && this.buffer[end + 1] === "'")
|
||||
end = this.buffer.indexOf("'", end + 2);
|
||||
}
|
||||
else {
|
||||
// double-quote
|
||||
while (end !== -1) {
|
||||
let n = 0;
|
||||
while (this.buffer[end - 1 - n] === '\\')
|
||||
n += 1;
|
||||
if (n % 2 === 0)
|
||||
break;
|
||||
end = this.buffer.indexOf('"', end + 1);
|
||||
}
|
||||
}
|
||||
// Only looking for newlines within the quotes
|
||||
const qb = this.buffer.substring(0, end);
|
||||
let nl = qb.indexOf('\n', this.pos);
|
||||
if (nl !== -1) {
|
||||
while (nl !== -1) {
|
||||
const cs = this.continueScalar(nl + 1);
|
||||
if (cs === -1)
|
||||
break;
|
||||
nl = qb.indexOf('\n', cs);
|
||||
}
|
||||
if (nl !== -1) {
|
||||
// this is an error caused by an unexpected unindent
|
||||
end = nl - (qb[nl - 1] === '\r' ? 2 : 1);
|
||||
}
|
||||
}
|
||||
if (end === -1) {
|
||||
if (!this.atEnd)
|
||||
return this.setNext('quoted-scalar');
|
||||
end = this.buffer.length;
|
||||
}
|
||||
yield* this.pushToIndex(end + 1, false);
|
||||
return this.flowLevel ? 'flow' : 'doc';
|
||||
}
|
||||
*parseBlockScalarHeader() {
|
||||
this.blockScalarIndent = -1;
|
||||
this.blockScalarKeep = false;
|
||||
let i = this.pos;
|
||||
while (true) {
|
||||
const ch = this.buffer[++i];
|
||||
if (ch === '+')
|
||||
this.blockScalarKeep = true;
|
||||
else if (ch > '0' && ch <= '9')
|
||||
this.blockScalarIndent = Number(ch) - 1;
|
||||
else if (ch !== '-')
|
||||
break;
|
||||
}
|
||||
return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#');
|
||||
}
|
||||
*parseBlockScalar() {
|
||||
let nl = this.pos - 1; // may be -1 if this.pos === 0
|
||||
let indent = 0;
|
||||
let ch;
|
||||
loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) {
|
||||
switch (ch) {
|
||||
case ' ':
|
||||
indent += 1;
|
||||
break;
|
||||
case '\n':
|
||||
nl = i;
|
||||
indent = 0;
|
||||
break;
|
||||
case '\r': {
|
||||
const next = this.buffer[i + 1];
|
||||
if (!next && !this.atEnd)
|
||||
return this.setNext('block-scalar');
|
||||
if (next === '\n')
|
||||
break;
|
||||
} // fallthrough
|
||||
default:
|
||||
break loop;
|
||||
}
|
||||
}
|
||||
if (!ch && !this.atEnd)
|
||||
return this.setNext('block-scalar');
|
||||
if (indent >= this.indentNext) {
|
||||
if (this.blockScalarIndent === -1)
|
||||
this.indentNext = indent;
|
||||
else
|
||||
this.indentNext += this.blockScalarIndent;
|
||||
do {
|
||||
const cs = this.continueScalar(nl + 1);
|
||||
if (cs === -1)
|
||||
break;
|
||||
nl = this.buffer.indexOf('\n', cs);
|
||||
} while (nl !== -1);
|
||||
if (nl === -1) {
|
||||
if (!this.atEnd)
|
||||
return this.setNext('block-scalar');
|
||||
nl = this.buffer.length;
|
||||
}
|
||||
}
|
||||
if (!this.blockScalarKeep) {
|
||||
do {
|
||||
let i = nl - 1;
|
||||
let ch = this.buffer[i];
|
||||
if (ch === '\r')
|
||||
ch = this.buffer[--i];
|
||||
const lastChar = i; // Drop the line if last char not more indented
|
||||
while (ch === ' ' || ch === '\t')
|
||||
ch = this.buffer[--i];
|
||||
if (ch === '\n' && i >= this.pos && i + 1 + indent > lastChar)
|
||||
nl = i;
|
||||
else
|
||||
break;
|
||||
} while (true);
|
||||
}
|
||||
yield SCALAR;
|
||||
yield* this.pushToIndex(nl + 1, true);
|
||||
return yield* this.parseLineStart();
|
||||
}
|
||||
*parsePlainScalar() {
|
||||
const inFlow = this.flowLevel > 0;
|
||||
let end = this.pos - 1;
|
||||
let i = this.pos - 1;
|
||||
let ch;
|
||||
while ((ch = this.buffer[++i])) {
|
||||
if (ch === ':') {
|
||||
const next = this.buffer[i + 1];
|
||||
if (isEmpty(next) || (inFlow && next === ','))
|
||||
break;
|
||||
end = i;
|
||||
}
|
||||
else if (isEmpty(ch)) {
|
||||
let next = this.buffer[i + 1];
|
||||
if (ch === '\r') {
|
||||
if (next === '\n') {
|
||||
i += 1;
|
||||
ch = '\n';
|
||||
next = this.buffer[i + 1];
|
||||
}
|
||||
else
|
||||
end = i;
|
||||
}
|
||||
if (next === '#' || (inFlow && invalidFlowScalarChars.includes(next)))
|
||||
break;
|
||||
if (ch === '\n') {
|
||||
const cs = this.continueScalar(i + 1);
|
||||
if (cs === -1)
|
||||
break;
|
||||
i = Math.max(i, cs - 2); // to advance, but still account for ' #'
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (inFlow && invalidFlowScalarChars.includes(ch))
|
||||
break;
|
||||
end = i;
|
||||
}
|
||||
}
|
||||
if (!ch && !this.atEnd)
|
||||
return this.setNext('plain-scalar');
|
||||
yield SCALAR;
|
||||
yield* this.pushToIndex(end + 1, true);
|
||||
return inFlow ? 'flow' : 'doc';
|
||||
}
|
||||
*pushCount(n) {
|
||||
if (n > 0) {
|
||||
yield this.buffer.substr(this.pos, n);
|
||||
this.pos += n;
|
||||
return n;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
*pushToIndex(i, allowEmpty) {
|
||||
const s = this.buffer.slice(this.pos, i);
|
||||
if (s) {
|
||||
yield s;
|
||||
this.pos += s.length;
|
||||
return s.length;
|
||||
}
|
||||
else if (allowEmpty)
|
||||
yield '';
|
||||
return 0;
|
||||
}
|
||||
*pushIndicators() {
|
||||
switch (this.charAt(0)) {
|
||||
case '!':
|
||||
return ((yield* this.pushTag()) +
|
||||
(yield* this.pushSpaces(true)) +
|
||||
(yield* this.pushIndicators()));
|
||||
case '&':
|
||||
return ((yield* this.pushUntil(isNotAnchorChar)) +
|
||||
(yield* this.pushSpaces(true)) +
|
||||
(yield* this.pushIndicators()));
|
||||
case '-': // this is an error
|
||||
case '?': // this is an error outside flow collections
|
||||
case ':': {
|
||||
const inFlow = this.flowLevel > 0;
|
||||
const ch1 = this.charAt(1);
|
||||
if (isEmpty(ch1) || (inFlow && invalidFlowScalarChars.includes(ch1))) {
|
||||
if (!inFlow)
|
||||
this.indentNext = this.indentValue + 1;
|
||||
else if (this.flowKey)
|
||||
this.flowKey = false;
|
||||
return ((yield* this.pushCount(1)) +
|
||||
(yield* this.pushSpaces(true)) +
|
||||
(yield* this.pushIndicators()));
|
||||
}
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
*pushTag() {
|
||||
if (this.charAt(1) === '<') {
|
||||
let i = this.pos + 2;
|
||||
let ch = this.buffer[i];
|
||||
while (!isEmpty(ch) && ch !== '>')
|
||||
ch = this.buffer[++i];
|
||||
return yield* this.pushToIndex(ch === '>' ? i + 1 : i, false);
|
||||
}
|
||||
else {
|
||||
let i = this.pos + 1;
|
||||
let ch = this.buffer[i];
|
||||
while (ch) {
|
||||
if (tagChars.includes(ch))
|
||||
ch = this.buffer[++i];
|
||||
else if (ch === '%' &&
|
||||
hexDigits.includes(this.buffer[i + 1]) &&
|
||||
hexDigits.includes(this.buffer[i + 2])) {
|
||||
ch = this.buffer[(i += 3)];
|
||||
}
|
||||
else
|
||||
break;
|
||||
}
|
||||
return yield* this.pushToIndex(i, false);
|
||||
}
|
||||
}
|
||||
*pushNewline() {
|
||||
const ch = this.buffer[this.pos];
|
||||
if (ch === '\n')
|
||||
return yield* this.pushCount(1);
|
||||
else if (ch === '\r' && this.charAt(1) === '\n')
|
||||
return yield* this.pushCount(2);
|
||||
else
|
||||
return 0;
|
||||
}
|
||||
*pushSpaces(allowTabs) {
|
||||
let i = this.pos - 1;
|
||||
let ch;
|
||||
do {
|
||||
ch = this.buffer[++i];
|
||||
} while (ch === ' ' || (allowTabs && ch === '\t'));
|
||||
const n = i - this.pos;
|
||||
if (n > 0) {
|
||||
yield this.buffer.substr(this.pos, n);
|
||||
this.pos = i;
|
||||
}
|
||||
return n;
|
||||
}
|
||||
*pushUntil(test) {
|
||||
let i = this.pos;
|
||||
let ch = this.buffer[i];
|
||||
while (!test(ch))
|
||||
ch = this.buffer[++i];
|
||||
return yield* this.pushToIndex(i, false);
|
||||
}
|
||||
}
|
||||
|
||||
export { Lexer };
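With `incomplete` set, the last partial line stays buffered until the next call, so the lexer can consume streaming input. A hedged sketch of feeding one document in two arbitrarily chosen chunks:

import { Lexer } from './lexer.js';

const lexer = new Lexer();
const lexemes = [];
// The first chunk ends mid-line, so its tail is held in the buffer rather than emitted.
for (const tok of lexer.lex('foo: bar\nbaz: q', true))
    lexemes.push(tok);
// The final chunk (incomplete = false) flushes whatever remains.
for (const tok of lexer.lex('ux\n', false))
    lexemes.push(tok);
// Apart from the \x02 and \x1f control markers, the joined lexemes restore the source.
console.log(lexemes.join('').replace(/[\x02\x1f]/g, '') === 'foo: bar\nbaz: qux\n'); // true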
39 node_modules/yaml/browser/dist/parse/line-counter.js (generated, vendored, normal file)
@@ -0,0 +1,39 @@
/**
 * Tracks newlines during parsing in order to provide an efficient API for
 * determining the one-indexed `{ line, col }` position for any offset
 * within the input.
 */
class LineCounter {
    constructor() {
        this.lineStarts = [];
        /**
         * Should be called in ascending order. Otherwise, call
         * `lineCounter.lineStarts.sort()` before calling `linePos()`.
         */
        this.addNewLine = (offset) => this.lineStarts.push(offset);
        /**
         * Performs a binary search and returns the 1-indexed { line, col }
         * position of `offset`. If `line === 0`, `addNewLine` has never been
         * called or `offset` is before the first known newline.
         */
        this.linePos = (offset) => {
            let low = 0;
            let high = this.lineStarts.length;
            while (low < high) {
                const mid = (low + high) >> 1; // Math.floor((low + high) / 2)
                if (this.lineStarts[mid] < offset)
                    low = mid + 1;
                else
                    high = mid;
            }
            if (this.lineStarts[low] === offset)
                return { line: low + 1, col: 1 };
            if (low === 0)
                return { line: 0, col: offset };
            const start = this.lineStarts[low - 1];
            return { line: low, col: offset - start + 1 };
        };
    }
}

export { LineCounter };
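The counter is designed to be handed to the parser as its `onNewLine` callback; `linePos` then maps any character offset back to a one-indexed position. A small sketch with an assumed sample document:

import { LineCounter } from './line-counter.js';
import { Parser } from './parser.js';

const lineCounter = new LineCounter();
const parser = new Parser(lineCounter.addNewLine);
Array.from(parser.parse('a: 1\nb:\n  c: 2\n')); // drain the token generator
console.log(lineCounter.linePos(10)); // { line: 3, col: 3 } -- the 'c' on the third line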
950 node_modules/yaml/browser/dist/parse/parser.js (generated, vendored, normal file)
@@ -0,0 +1,950 @@
import { tokenType } from './cst.js';
|
||||
import { Lexer } from './lexer.js';
|
||||
|
||||
function includesToken(list, type) {
|
||||
for (let i = 0; i < list.length; ++i)
|
||||
if (list[i].type === type)
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
function findNonEmptyIndex(list) {
|
||||
for (let i = 0; i < list.length; ++i) {
|
||||
switch (list[i].type) {
|
||||
case 'space':
|
||||
case 'comment':
|
||||
case 'newline':
|
||||
break;
|
||||
default:
|
||||
return i;
|
||||
}
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
function isFlowToken(token) {
|
||||
switch (token?.type) {
|
||||
case 'alias':
|
||||
case 'scalar':
|
||||
case 'single-quoted-scalar':
|
||||
case 'double-quoted-scalar':
|
||||
case 'flow-collection':
|
||||
return true;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
function getPrevProps(parent) {
|
||||
switch (parent.type) {
|
||||
case 'document':
|
||||
return parent.start;
|
||||
case 'block-map': {
|
||||
const it = parent.items[parent.items.length - 1];
|
||||
return it.sep ?? it.start;
|
||||
}
|
||||
case 'block-seq':
|
||||
return parent.items[parent.items.length - 1].start;
|
||||
/* istanbul ignore next should not happen */
|
||||
default:
|
||||
return [];
|
||||
}
|
||||
}
|
||||
/** Note: May modify input array */
|
||||
function getFirstKeyStartProps(prev) {
|
||||
if (prev.length === 0)
|
||||
return [];
|
||||
let i = prev.length;
|
||||
loop: while (--i >= 0) {
|
||||
switch (prev[i].type) {
|
||||
case 'doc-start':
|
||||
case 'explicit-key-ind':
|
||||
case 'map-value-ind':
|
||||
case 'seq-item-ind':
|
||||
case 'newline':
|
||||
break loop;
|
||||
}
|
||||
}
|
||||
while (prev[++i]?.type === 'space') {
|
||||
/* loop */
|
||||
}
|
||||
return prev.splice(i, prev.length);
|
||||
}
|
||||
function fixFlowSeqItems(fc) {
|
||||
if (fc.start.type === 'flow-seq-start') {
|
||||
for (const it of fc.items) {
|
||||
if (it.sep &&
|
||||
!it.value &&
|
||||
!includesToken(it.start, 'explicit-key-ind') &&
|
||||
!includesToken(it.sep, 'map-value-ind')) {
|
||||
if (it.key)
|
||||
it.value = it.key;
|
||||
delete it.key;
|
||||
if (isFlowToken(it.value)) {
|
||||
if (it.value.end)
|
||||
Array.prototype.push.apply(it.value.end, it.sep);
|
||||
else
|
||||
it.value.end = it.sep;
|
||||
}
|
||||
else
|
||||
Array.prototype.push.apply(it.start, it.sep);
|
||||
delete it.sep;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* A YAML concrete syntax tree (CST) parser
|
||||
*
|
||||
* ```ts
|
||||
* const src: string = ...
|
||||
* for (const token of new Parser().parse(src)) {
|
||||
* // token: Token
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* To use the parser with a user-provided lexer:
|
||||
*
|
||||
* ```ts
|
||||
* function* parse(source: string, lexer: Lexer) {
|
||||
* const parser = new Parser()
|
||||
* for (const lexeme of lexer.lex(source))
|
||||
* yield* parser.next(lexeme)
|
||||
* yield* parser.end()
|
||||
* }
|
||||
*
|
||||
* const src: string = ...
|
||||
* const lexer = new Lexer()
|
||||
* for (const token of parse(src, lexer)) {
|
||||
* // token: Token
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
class Parser {
|
||||
/**
|
||||
* @param onNewLine - If defined, called separately with the start position of
|
||||
* each new line (in `parse()`, including the start of input).
|
||||
*/
|
||||
constructor(onNewLine) {
|
||||
/** If true, space and sequence indicators count as indentation */
|
||||
this.atNewLine = true;
|
||||
/** If true, next token is a scalar value */
|
||||
this.atScalar = false;
|
||||
/** Current indentation level */
|
||||
this.indent = 0;
|
||||
/** Current offset since the start of parsing */
|
||||
this.offset = 0;
|
||||
/** On the same line with a block map key */
|
||||
this.onKeyLine = false;
|
||||
/** Top indicates the node that's currently being built */
|
||||
this.stack = [];
|
||||
/** The source of the current token, set in parse() */
|
||||
this.source = '';
|
||||
/** The type of the current token, set in parse() */
|
||||
this.type = '';
|
||||
// Must be defined after `next()`
|
||||
this.lexer = new Lexer();
|
||||
this.onNewLine = onNewLine;
|
||||
}
|
||||
/**
|
||||
* Parse `source` as a YAML stream.
|
||||
* If `incomplete`, a part of the last line may be left as a buffer for the next call.
|
||||
*
|
||||
* Errors are not thrown, but yielded as `{ type: 'error', message }` tokens.
|
||||
*
|
||||
* @returns A generator of tokens representing each directive, document, and other structure.
|
||||
*/
|
||||
*parse(source, incomplete = false) {
|
||||
if (this.onNewLine && this.offset === 0)
|
||||
this.onNewLine(0);
|
||||
for (const lexeme of this.lexer.lex(source, incomplete))
|
||||
yield* this.next(lexeme);
|
||||
if (!incomplete)
|
||||
yield* this.end();
|
||||
}
|
||||
/**
|
||||
* Advance the parser by the `source` of one lexical token.
|
||||
*/
|
||||
*next(source) {
|
||||
this.source = source;
|
||||
if (this.atScalar) {
|
||||
this.atScalar = false;
|
||||
yield* this.step();
|
||||
this.offset += source.length;
|
||||
return;
|
||||
}
|
||||
const type = tokenType(source);
|
||||
if (!type) {
|
||||
const message = `Not a YAML token: ${source}`;
|
||||
yield* this.pop({ type: 'error', offset: this.offset, message, source });
|
||||
this.offset += source.length;
|
||||
}
|
||||
else if (type === 'scalar') {
|
||||
this.atNewLine = false;
|
||||
this.atScalar = true;
|
||||
this.type = 'scalar';
|
||||
}
|
||||
else {
|
||||
this.type = type;
|
||||
yield* this.step();
|
||||
switch (type) {
|
||||
case 'newline':
|
||||
this.atNewLine = true;
|
||||
this.indent = 0;
|
||||
if (this.onNewLine)
|
||||
this.onNewLine(this.offset + source.length);
|
||||
break;
|
||||
case 'space':
|
||||
if (this.atNewLine && source[0] === ' ')
|
||||
this.indent += source.length;
|
||||
break;
|
||||
case 'explicit-key-ind':
|
||||
case 'map-value-ind':
|
||||
case 'seq-item-ind':
|
||||
if (this.atNewLine)
|
||||
this.indent += source.length;
|
||||
break;
|
||||
case 'doc-mode':
|
||||
case 'flow-error-end':
|
||||
return;
|
||||
default:
|
||||
this.atNewLine = false;
|
||||
}
|
||||
this.offset += source.length;
|
||||
}
|
||||
}
|
||||
/** Call at end of input to push out any remaining constructions */
|
||||
*end() {
|
||||
while (this.stack.length > 0)
|
||||
yield* this.pop();
|
||||
}
|
||||
get sourceToken() {
|
||||
const st = {
|
||||
type: this.type,
|
||||
offset: this.offset,
|
||||
indent: this.indent,
|
||||
source: this.source
|
||||
};
|
||||
return st;
|
||||
}
|
||||
*step() {
|
||||
const top = this.peek(1);
|
||||
if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) {
|
||||
while (this.stack.length > 0)
|
||||
yield* this.pop();
|
||||
this.stack.push({
|
||||
type: 'doc-end',
|
||||
offset: this.offset,
|
||||
source: this.source
|
||||
});
|
||||
return;
|
||||
}
|
||||
if (!top)
|
||||
return yield* this.stream();
|
||||
switch (top.type) {
|
||||
case 'document':
|
||||
return yield* this.document(top);
|
||||
case 'alias':
|
||||
case 'scalar':
|
||||
case 'single-quoted-scalar':
|
||||
case 'double-quoted-scalar':
|
||||
return yield* this.scalar(top);
|
||||
case 'block-scalar':
|
||||
return yield* this.blockScalar(top);
|
||||
case 'block-map':
|
||||
return yield* this.blockMap(top);
|
||||
case 'block-seq':
|
||||
return yield* this.blockSequence(top);
|
||||
case 'flow-collection':
|
||||
return yield* this.flowCollection(top);
|
||||
case 'doc-end':
|
||||
return yield* this.documentEnd(top);
|
||||
}
|
||||
/* istanbul ignore next should not happen */
|
||||
yield* this.pop();
|
||||
}
|
||||
peek(n) {
|
||||
return this.stack[this.stack.length - n];
|
||||
}
|
||||
*pop(error) {
|
||||
const token = error ?? this.stack.pop();
|
||||
/* istanbul ignore if should not happen */
|
||||
if (!token) {
|
||||
const message = 'Tried to pop an empty stack';
|
||||
yield { type: 'error', offset: this.offset, source: '', message };
|
||||
}
|
||||
else if (this.stack.length === 0) {
|
||||
yield token;
|
||||
}
|
||||
else {
|
||||
const top = this.peek(1);
|
||||
if (token.type === 'block-scalar') {
|
||||
// Block scalars use their parent rather than header indent
|
||||
token.indent = 'indent' in top ? top.indent : 0;
|
||||
}
|
||||
else if (token.type === 'flow-collection' && top.type === 'document') {
|
||||
// Ignore all indent for top-level flow collections
|
||||
token.indent = 0;
|
||||
}
|
||||
if (token.type === 'flow-collection')
|
||||
fixFlowSeqItems(token);
|
||||
switch (top.type) {
|
||||
case 'document':
|
||||
top.value = token;
|
||||
break;
|
||||
case 'block-scalar':
|
||||
top.props.push(token); // error
|
||||
break;
|
||||
case 'block-map': {
|
||||
const it = top.items[top.items.length - 1];
|
||||
if (it.value) {
|
||||
top.items.push({ start: [], key: token, sep: [] });
|
||||
this.onKeyLine = true;
|
||||
return;
|
||||
}
|
||||
else if (it.sep) {
|
||||
it.value = token;
|
||||
}
|
||||
else {
|
||||
Object.assign(it, { key: token, sep: [] });
|
||||
this.onKeyLine = !includesToken(it.start, 'explicit-key-ind');
|
||||
return;
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 'block-seq': {
|
||||
const it = top.items[top.items.length - 1];
|
||||
if (it.value)
|
||||
top.items.push({ start: [], value: token });
|
||||
else
|
||||
it.value = token;
|
||||
break;
|
||||
}
|
||||
case 'flow-collection': {
|
||||
const it = top.items[top.items.length - 1];
|
||||
if (!it || it.value)
|
||||
top.items.push({ start: [], key: token, sep: [] });
|
||||
else if (it.sep)
|
||||
it.value = token;
|
||||
else
|
||||
Object.assign(it, { key: token, sep: [] });
|
||||
return;
|
||||
}
|
||||
/* istanbul ignore next should not happen */
|
||||
default:
|
||||
yield* this.pop();
|
||||
yield* this.pop(token);
|
||||
}
|
||||
if ((top.type === 'document' ||
|
||||
top.type === 'block-map' ||
|
||||
top.type === 'block-seq') &&
|
||||
(token.type === 'block-map' || token.type === 'block-seq')) {
|
||||
const last = token.items[token.items.length - 1];
|
||||
if (last &&
|
||||
!last.sep &&
|
||||
!last.value &&
|
||||
last.start.length > 0 &&
|
||||
findNonEmptyIndex(last.start) === -1 &&
|
||||
(token.indent === 0 ||
|
||||
last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) {
|
||||
if (top.type === 'document')
|
||||
top.end = last.start;
|
||||
else
|
||||
top.items.push({ start: last.start });
|
||||
token.items.splice(-1, 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
*stream() {
|
||||
switch (this.type) {
|
||||
case 'directive-line':
|
||||
yield { type: 'directive', offset: this.offset, source: this.source };
|
||||
return;
|
||||
case 'byte-order-mark':
|
||||
case 'space':
|
||||
case 'comment':
|
||||
case 'newline':
|
||||
yield this.sourceToken;
|
||||
return;
|
||||
case 'doc-mode':
|
||||
case 'doc-start': {
|
||||
const doc = {
|
||||
type: 'document',
|
||||
offset: this.offset,
|
||||
start: []
|
||||
};
|
||||
if (this.type === 'doc-start')
|
||||
doc.start.push(this.sourceToken);
|
||||
this.stack.push(doc);
|
||||
return;
|
||||
}
|
||||
}
|
||||
yield {
|
||||
type: 'error',
|
||||
offset: this.offset,
|
||||
message: `Unexpected ${this.type} token in YAML stream`,
|
||||
source: this.source
|
||||
};
|
||||
}
|
||||
*document(doc) {
|
||||
if (doc.value)
|
||||
return yield* this.lineEnd(doc);
|
||||
switch (this.type) {
|
||||
case 'doc-start': {
|
||||
if (findNonEmptyIndex(doc.start) !== -1) {
|
||||
yield* this.pop();
|
||||
yield* this.step();
|
||||
}
|
||||
else
|
||||
doc.start.push(this.sourceToken);
|
||||
return;
|
||||
}
|
||||
case 'anchor':
|
||||
case 'tag':
|
||||
case 'space':
|
||||
case 'comment':
|
||||
case 'newline':
|
||||
doc.start.push(this.sourceToken);
|
||||
return;
|
||||
}
|
||||
const bv = this.startBlockValue(doc);
|
||||
if (bv)
|
||||
this.stack.push(bv);
|
||||
else {
|
||||
yield {
|
||||
type: 'error',
|
||||
offset: this.offset,
|
||||
message: `Unexpected ${this.type} token in YAML document`,
|
||||
source: this.source
|
||||
};
|
||||
}
|
||||
}
|
||||
*scalar(scalar) {
|
||||
if (this.type === 'map-value-ind') {
|
||||
const prev = getPrevProps(this.peek(2));
|
||||
const start = getFirstKeyStartProps(prev);
|
||||
let sep;
|
||||
if (scalar.end) {
|
||||
sep = scalar.end;
|
||||
sep.push(this.sourceToken);
|
||||
delete scalar.end;
|
||||
}
|
||||
else
|
||||
sep = [this.sourceToken];
|
||||
const map = {
|
||||
type: 'block-map',
|
||||
offset: scalar.offset,
|
||||
indent: scalar.indent,
|
||||
items: [{ start, key: scalar, sep }]
|
||||
};
|
||||
this.onKeyLine = true;
|
||||
this.stack[this.stack.length - 1] = map;
|
||||
}
|
||||
else
|
||||
yield* this.lineEnd(scalar);
|
||||
}
|
||||
*blockScalar(scalar) {
|
||||
switch (this.type) {
|
||||
case 'space':
|
||||
case 'comment':
|
||||
case 'newline':
|
||||
scalar.props.push(this.sourceToken);
|
||||
return;
|
||||
case 'scalar':
|
||||
scalar.source = this.source;
|
||||
// block-scalar source includes trailing newline
|
||||
this.atNewLine = true;
|
||||
this.indent = 0;
|
||||
if (this.onNewLine) {
|
||||
let nl = this.source.indexOf('\n') + 1;
|
||||
while (nl !== 0) {
|
||||
this.onNewLine(this.offset + nl);
|
||||
nl = this.source.indexOf('\n', nl) + 1;
|
||||
}
|
||||
}
|
||||
yield* this.pop();
|
||||
break;
|
||||
/* istanbul ignore next should not happen */
|
||||
default:
|
||||
yield* this.pop();
|
||||
yield* this.step();
|
||||
}
|
||||
}
|
||||
    *blockMap(map) {
        const it = map.items[map.items.length - 1];
        // it.sep is true-ish if pair already has key or : separator
        switch (this.type) {
            case 'newline':
                this.onKeyLine = false;
                if (it.value) {
                    const end = 'end' in it.value ? it.value.end : undefined;
                    const last = Array.isArray(end) ? end[end.length - 1] : undefined;
                    if (last?.type === 'comment')
                        end?.push(this.sourceToken);
                    else
                        map.items.push({ start: [this.sourceToken] });
                }
                else if (it.sep) {
                    it.sep.push(this.sourceToken);
                }
                else {
                    it.start.push(this.sourceToken);
                }
                return;
            case 'space':
            case 'comment':
                if (it.value) {
                    map.items.push({ start: [this.sourceToken] });
                }
                else if (it.sep) {
                    it.sep.push(this.sourceToken);
                }
                else {
                    if (this.atIndentedComment(it.start, map.indent)) {
                        const prev = map.items[map.items.length - 2];
                        const end = prev?.value?.end;
                        if (Array.isArray(end)) {
                            Array.prototype.push.apply(end, it.start);
                            end.push(this.sourceToken);
                            map.items.pop();
                            return;
                        }
                    }
                    it.start.push(this.sourceToken);
                }
                return;
        }
        if (this.indent >= map.indent) {
            const atNextItem = !this.onKeyLine && this.indent === map.indent && it.sep;
            // For empty nodes, assign newline-separated not indented empty tokens to following node
            let start = [];
            if (atNextItem && it.sep && !it.value) {
                const nl = [];
                for (let i = 0; i < it.sep.length; ++i) {
                    const st = it.sep[i];
                    switch (st.type) {
                        case 'newline':
                            nl.push(i);
                            break;
                        case 'space':
                            break;
                        case 'comment':
                            if (st.indent > map.indent)
                                nl.length = 0;
                            break;
                        default:
                            nl.length = 0;
                    }
                }
                if (nl.length >= 2)
                    start = it.sep.splice(nl[1]);
            }
            switch (this.type) {
                case 'anchor':
                case 'tag':
                    if (atNextItem || it.value) {
                        start.push(this.sourceToken);
                        map.items.push({ start });
                        this.onKeyLine = true;
                    }
                    else if (it.sep) {
                        it.sep.push(this.sourceToken);
                    }
                    else {
                        it.start.push(this.sourceToken);
                    }
                    return;
                case 'explicit-key-ind':
                    if (!it.sep && !includesToken(it.start, 'explicit-key-ind')) {
                        it.start.push(this.sourceToken);
                    }
                    else if (atNextItem || it.value) {
                        start.push(this.sourceToken);
                        map.items.push({ start });
                    }
                    else {
                        this.stack.push({
                            type: 'block-map',
                            offset: this.offset,
                            indent: this.indent,
                            items: [{ start: [this.sourceToken] }]
                        });
                    }
                    this.onKeyLine = true;
                    return;
                case 'map-value-ind':
                    if (includesToken(it.start, 'explicit-key-ind')) {
                        if (!it.sep) {
                            if (includesToken(it.start, 'newline')) {
                                Object.assign(it, { key: null, sep: [this.sourceToken] });
                            }
                            else {
                                const start = getFirstKeyStartProps(it.start);
                                this.stack.push({
                                    type: 'block-map',
                                    offset: this.offset,
                                    indent: this.indent,
                                    items: [{ start, key: null, sep: [this.sourceToken] }]
                                });
                            }
                        }
                        else if (it.value) {
                            map.items.push({ start: [], key: null, sep: [this.sourceToken] });
                        }
                        else if (includesToken(it.sep, 'map-value-ind')) {
                            this.stack.push({
                                type: 'block-map',
                                offset: this.offset,
                                indent: this.indent,
                                items: [{ start, key: null, sep: [this.sourceToken] }]
                            });
                        }
                        else if (isFlowToken(it.key) &&
                            !includesToken(it.sep, 'newline')) {
                            const start = getFirstKeyStartProps(it.start);
                            const key = it.key;
                            const sep = it.sep;
                            sep.push(this.sourceToken);
                            // @ts-expect-error type guard is wrong here
                            delete it.key, delete it.sep;
                            this.stack.push({
                                type: 'block-map',
                                offset: this.offset,
                                indent: this.indent,
                                items: [{ start, key, sep }]
                            });
                        }
                        else if (start.length > 0) {
                            // Not actually at next item
                            it.sep = it.sep.concat(start, this.sourceToken);
                        }
                        else {
                            it.sep.push(this.sourceToken);
                        }
                    }
                    else {
                        if (!it.sep) {
                            Object.assign(it, { key: null, sep: [this.sourceToken] });
                        }
                        else if (it.value || atNextItem) {
                            map.items.push({ start, key: null, sep: [this.sourceToken] });
                        }
                        else if (includesToken(it.sep, 'map-value-ind')) {
                            this.stack.push({
                                type: 'block-map',
                                offset: this.offset,
                                indent: this.indent,
                                items: [{ start: [], key: null, sep: [this.sourceToken] }]
                            });
                        }
                        else {
                            it.sep.push(this.sourceToken);
                        }
                    }
                    this.onKeyLine = true;
                    return;
                case 'alias':
                case 'scalar':
                case 'single-quoted-scalar':
                case 'double-quoted-scalar': {
                    const fs = this.flowScalar(this.type);
                    if (atNextItem || it.value) {
                        map.items.push({ start, key: fs, sep: [] });
                        this.onKeyLine = true;
                    }
                    else if (it.sep) {
                        this.stack.push(fs);
                    }
                    else {
                        Object.assign(it, { key: fs, sep: [] });
                        this.onKeyLine = true;
                    }
                    return;
                }
                default: {
                    const bv = this.startBlockValue(map);
                    if (bv) {
                        if (atNextItem &&
                            bv.type !== 'block-seq' &&
                            includesToken(it.start, 'explicit-key-ind')) {
                            map.items.push({ start });
                        }
                        this.stack.push(bv);
                        return;
                    }
                }
            }
        }
        yield* this.pop();
        yield* this.step();
    }
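    // Handle the next token while a block sequence is at the top of the stack.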
    *blockSequence(seq) {
        const it = seq.items[seq.items.length - 1];
        switch (this.type) {
            case 'newline':
                if (it.value) {
                    const end = 'end' in it.value ? it.value.end : undefined;
                    const last = Array.isArray(end) ? end[end.length - 1] : undefined;
                    if (last?.type === 'comment')
                        end?.push(this.sourceToken);
                    else
                        seq.items.push({ start: [this.sourceToken] });
                }
                else
                    it.start.push(this.sourceToken);
                return;
            case 'space':
            case 'comment':
                if (it.value)
                    seq.items.push({ start: [this.sourceToken] });
                else {
                    if (this.atIndentedComment(it.start, seq.indent)) {
                        const prev = seq.items[seq.items.length - 2];
                        const end = prev?.value?.end;
                        if (Array.isArray(end)) {
                            Array.prototype.push.apply(end, it.start);
                            end.push(this.sourceToken);
                            seq.items.pop();
                            return;
                        }
                    }
                    it.start.push(this.sourceToken);
                }
                return;
            case 'anchor':
            case 'tag':
                if (it.value || this.indent <= seq.indent)
                    break;
                it.start.push(this.sourceToken);
                return;
            case 'seq-item-ind':
                if (this.indent !== seq.indent)
                    break;
                if (it.value || includesToken(it.start, 'seq-item-ind'))
                    seq.items.push({ start: [this.sourceToken] });
                else
                    it.start.push(this.sourceToken);
                return;
        }
        if (this.indent > seq.indent) {
            const bv = this.startBlockValue(seq);
            if (bv) {
                this.stack.push(bv);
                return;
            }
        }
        yield* this.pop();
        yield* this.step();
    }
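    // Handle the next token while a flow collection ({...} or [...]) is at the top of the stack.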
    *flowCollection(fc) {
        const it = fc.items[fc.items.length - 1];
        if (this.type === 'flow-error-end') {
            let top;
            do {
                yield* this.pop();
                top = this.peek(1);
            } while (top && top.type === 'flow-collection');
        }
        else if (fc.end.length === 0) {
            switch (this.type) {
                case 'comma':
                case 'explicit-key-ind':
                    if (!it || it.sep)
                        fc.items.push({ start: [this.sourceToken] });
                    else
                        it.start.push(this.sourceToken);
                    return;
                case 'map-value-ind':
                    if (!it || it.value)
                        fc.items.push({ start: [], key: null, sep: [this.sourceToken] });
                    else if (it.sep)
                        it.sep.push(this.sourceToken);
                    else
                        Object.assign(it, { key: null, sep: [this.sourceToken] });
                    return;
                case 'space':
                case 'comment':
                case 'newline':
                case 'anchor':
                case 'tag':
                    if (!it || it.value)
                        fc.items.push({ start: [this.sourceToken] });
                    else if (it.sep)
                        it.sep.push(this.sourceToken);
                    else
                        it.start.push(this.sourceToken);
                    return;
                case 'alias':
                case 'scalar':
                case 'single-quoted-scalar':
                case 'double-quoted-scalar': {
                    const fs = this.flowScalar(this.type);
                    if (!it || it.value)
                        fc.items.push({ start: [], key: fs, sep: [] });
                    else if (it.sep)
                        this.stack.push(fs);
                    else
                        Object.assign(it, { key: fs, sep: [] });
                    return;
                }
                case 'flow-map-end':
                case 'flow-seq-end':
                    fc.end.push(this.sourceToken);
                    return;
            }
            const bv = this.startBlockValue(fc);
            /* istanbul ignore else should not happen */
            if (bv)
                this.stack.push(bv);
            else {
                yield* this.pop();
                yield* this.step();
            }
        }
        else {
            const parent = this.peek(2);
            if (parent.type === 'block-map' &&
                ((this.type === 'map-value-ind' && parent.indent === fc.indent) ||
                    (this.type === 'newline' &&
                        !parent.items[parent.items.length - 1].sep))) {
                yield* this.pop();
                yield* this.step();
            }
            else if (this.type === 'map-value-ind' &&
                parent.type !== 'flow-collection') {
                const prev = getPrevProps(parent);
                const start = getFirstKeyStartProps(prev);
                fixFlowSeqItems(fc);
                const sep = fc.end.splice(1, fc.end.length);
                sep.push(this.sourceToken);
                const map = {
                    type: 'block-map',
                    offset: fc.offset,
                    indent: fc.indent,
                    items: [{ start, key: fc, sep }]
                };
                this.onKeyLine = true;
                this.stack[this.stack.length - 1] = map;
            }
            else {
                yield* this.lineEnd(fc);
            }
        }
    }
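    // Build a flow-scalar token from the current source, reporting any newlines it contains via onNewLine.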
    flowScalar(type) {
        if (this.onNewLine) {
            let nl = this.source.indexOf('\n') + 1;
            while (nl !== 0) {
                this.onNewLine(this.offset + nl);
                nl = this.source.indexOf('\n', nl) + 1;
            }
        }
        return {
            type,
            offset: this.offset,
            indent: this.indent,
            source: this.source
        };
    }
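    // Start a new block-level value (flow scalar, block scalar, flow collection,
    // block sequence or block map) based on the current token, or return null.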
    startBlockValue(parent) {
        switch (this.type) {
            case 'alias':
            case 'scalar':
            case 'single-quoted-scalar':
            case 'double-quoted-scalar':
                return this.flowScalar(this.type);
            case 'block-scalar-header':
                return {
                    type: 'block-scalar',
                    offset: this.offset,
                    indent: this.indent,
                    props: [this.sourceToken],
                    source: ''
                };
            case 'flow-map-start':
            case 'flow-seq-start':
                return {
                    type: 'flow-collection',
                    offset: this.offset,
                    indent: this.indent,
                    start: this.sourceToken,
                    items: [],
                    end: []
                };
            case 'seq-item-ind':
                return {
                    type: 'block-seq',
                    offset: this.offset,
                    indent: this.indent,
                    items: [{ start: [this.sourceToken] }]
                };
            case 'explicit-key-ind': {
                this.onKeyLine = true;
                const prev = getPrevProps(parent);
                const start = getFirstKeyStartProps(prev);
                start.push(this.sourceToken);
                return {
                    type: 'block-map',
                    offset: this.offset,
                    indent: this.indent,
                    items: [{ start }]
                };
            }
            case 'map-value-ind': {
                this.onKeyLine = true;
                const prev = getPrevProps(parent);
                const start = getFirstKeyStartProps(prev);
                return {
                    type: 'block-map',
                    offset: this.offset,
                    indent: this.indent,
                    items: [{ start, key: null, sep: [this.sourceToken] }]
                };
            }
        }
        return null;
    }
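    // True if the current comment is indented deeper than `indent` and only
    // newline/space tokens precede it in `start`.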
    atIndentedComment(start, indent) {
        if (this.type !== 'comment')
            return false;
        if (this.indent <= indent)
            return false;
        return start.every(st => st.type === 'newline' || st.type === 'space');
    }
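    // Collect tokens that trail a document-end marker.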
    *documentEnd(docEnd) {
        if (this.type !== 'doc-mode') {
            if (docEnd.end)
                docEnd.end.push(this.sourceToken);
            else
                docEnd.end = [this.sourceToken];
            if (this.type === 'newline')
                yield* this.pop();
        }
    }
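    // Collect trailing space/comment/newline tokens onto `token.end`;
    // structural tokens pop the token and re-dispatch instead.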
    *lineEnd(token) {
        switch (this.type) {
            case 'comma':
            case 'doc-start':
            case 'doc-end':
            case 'flow-seq-end':
            case 'flow-map-end':
            case 'map-value-ind':
                yield* this.pop();
                yield* this.step();
                break;
            case 'newline':
                this.onKeyLine = false;
            // fallthrough
            case 'space':
            case 'comment':
            default:
                // all other values are errors
                if (token.end)
                    token.end.push(this.sourceToken);
                else
                    token.end = [this.sourceToken];
                if (this.type === 'newline')
                    yield* this.pop();
        }
    }
}

export { Parser };